A few suggestions for the python script:

- using format strings (f-strings) is a nice way in python to build strings using variables (sketched below).
- you can read and process a file in one pass if you iterate over the open file itself instead of reading it into a variable and then looping
- i had to change your strip code when i stopped using csv reader
- my python linter and auto-formatter hate non-indented comments
- i added a few lines to print cases where we don't get Ok responses.
This commit is contained in:
parent c54d8ba28a
commit 49c3203d78
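The two patterns named in the message, f-strings and one-pass iteration over an open file, look roughly like this in isolation. A minimal sketch, not the script itself; the file name, its contents, and the path/date values are hypothetical stand-ins:

# f-strings interpolate variables directly instead of concatenating:
outputPath = "output/"      # hypothetical path
queryDate = "20200301"      # hypothetical date stamp
j_Out = f"{outputPath}dailyviews{queryDate}.json"   # was: outputPath + "dailyviews" + queryDate + ".json"

# set up a throwaway input file so the sketch runs on its own
with open("article_list.csv", 'w') as f:
    f.write('article\n"Coronavirus"\n"Pandemic"\n')

# iterating over the open file processes it in one pass, with no
# intermediate list; without csv.reader each line keeps its quotes
# and newline, hence the explicit strip
with open("article_list.csv", 'r') as infile:
    next(infile)  # skip header
    for line in infile:
        article = line.strip("\"\n")  # destringify, as in the diff
        print(article)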
@@ -20,7 +20,6 @@ import datetime
 #import feather
 
-
 
 def parse_args():
 
     parser = argparse.ArgumentParser(description='Call the views API repeatedly.')
@@ -49,35 +48,32 @@ def main():
 
 
     articleList = []
     #1 Load up the list of article names
 
-    with open(articleFile, 'r') as infileHandle:
-        theInfile = csv.reader(infileHandle)
-        next(theInfile) #skip header
-        for currentLine in theInfile:
-            articleList.append(currentLine)
-
-    j_Out = outputPath + "dailyviews" + queryDate + ".json"
-    t_Out = outputPath + "dailyviews" + queryDate + ".tsv"
-
-    j = []
-
-    i = 0 #iterator to deal with end of file
-
-#2 Repeatedly call the API with that list of names
-
-    for a in articleList:
-        a = a[0] #destringify
-        i = i+1
-        url= "https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/en.wikipedia/all-access/all-agents/"
-        url= url + a + "/daily/" + queryDate + "/" + queryDate #for now, single date at a time
-        response = requests.get(url)
-        if response.ok:
-            jd = json.loads(response.content)
-            j.append(jd["items"][0])
-            time.sleep(.1)
-
-#3 Save results as a JSON and TSV
-
+    j_Out = f"{outputPath}dailyviews{queryDate}.json"
+    t_Out = f"{outputPath}dailyviews{queryDate}.tsv"
+
+    with open(articleFile, 'r') as infile:
+        next(infile) #skip header
+        articleList = infile
+
+        j = []
+
+        #2 Repeatedly call the API with that list of names
+
+        for a in articleList:
+            a = a.strip("\"\n") #destringify
+            url= f"https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/en.wikipedia/all-access/all-agents/{a}/daily/{queryDate}/{queryDate}"
+
+            response = requests.get(url)
+            if response.ok:
+                jd = json.loads(response.content)
+                j.append(jd["items"][0])
+                time.sleep(.1)
+            else:
+                print(f"Not ok response: {response.status_code} from {url}")
+    #3 Save results as a JSON and TSV
+
+
    #all data in j now, make json file
    with open(j_Out, 'w') as j_outfile:
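Pulled out of the diff, the new fetch loop amounts to the pattern below. This is a standalone sketch: queryDate and articleList are filled in with hypothetical values rather than the script's argparse and file inputs. One caveat worth noting: in the real script articleList is the open file handle, so the loop has to stay inside the with block, because the handle is closed once the block exits.

import json
import time
import requests

queryDate = "20200301"                  # hypothetical date
articleList = ["Pandemic"]              # hypothetical article names

j = []
for a in articleList:
    url = (f"https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/"
           f"en.wikipedia/all-access/all-agents/{a}/daily/{queryDate}/{queryDate}")
    response = requests.get(url)
    if response.ok:
        jd = json.loads(response.content)
        j.append(jd["items"][0])
        time.sleep(.1)                  # stay polite to the API
    else:
        # the commit's new diagnostic for non-Ok responses
        print(f"Not ok response: {response.status_code} from {url}")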
@@ -89,8 +85,8 @@ def main():
         dw.writerows(j)
 
 
-    f_Out = outputPath + "dailyviews" + queryDate + ".feather"
-    #read the json back in and make a feather file?
+    # f_Out = outputPath + "dailyviews" + queryDate + ".feather"
+    # read the json back in and make a feather file?
 
 
 if __name__ == "__main__":
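The feather step is now commented out rather than deleted. If it is ever revived, one option — an assumption, not something this commit implements — is to round-trip through pandas, which can write feather files via pyarrow. The paths here are stand-ins, and the sketch writes its own sample JSON so it runs on its own:

import json
import pandas as pd   # assumes pandas with the pyarrow feather engine installed

j_Out = "dailyviews20200301.json"       # hypothetical paths
f_Out = "dailyviews20200301.feather"

# stand-in for the file the script writes in step 3
with open(j_Out, 'w') as f:
    json.dump([{"article": "Pandemic", "views": 12345}], f)

# read the json back in and make a feather file
with open(j_Out, 'r') as j_infile:
    records = json.load(j_infile)
pd.DataFrame(records).to_feather(f_Out)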