renaming example analysis directories

aaronshaw 2020-04-01 19:12:45 -05:00
parent ff96d52cb9
commit 576d882c04
8 changed files with 63 additions and 0 deletions

Binary image file changed (48 KiB before and after).


@@ -0,0 +1,11 @@
"article","project","timestamp","views"
"201920_coronavirus_pandemic","en.wikipedia","2020033100",831879
"2020_coronavirus_pandemic_in_India","en.wikipedia","2020033100",323123
"201920_coronavirus_pandemic_by_country_and_territory","en.wikipedia","2020033100",315572
"2020_coronavirus_pandemic_in_the_United_States","en.wikipedia","2020033100",290535
"Coronavirus_disease_2019","en.wikipedia","2020033100",211391
"2020_coronavirus_pandemic_in_Italy","en.wikipedia","2020033100",209908
"Coronavirus","en.wikipedia","2020033100",188921
"USNS_Comfort_(T-AH-20)","en.wikipedia","2020033100",150422
"USNS_Comfort_(T-AH-20)","en.wikipedia","2020033100",150422
"WrestleMania_36","en.wikipedia","2020033100",137637


@@ -0,0 +1,52 @@
### COVID-19 Digital Observatory
### 2020-03-28
###
### Minimal example analysis file using pageview data
library(tidyverse)
library(scales)
### Import and cleanup one datafile from the observatory
DataURL <-
url("https://covid19.communitydata.science/datasets/wikipedia/digobs_covid19-wikipedia-enwiki_dailyviews-20200401.tsv")
views <-
read.table(DataURL, sep="\t", header=TRUE, stringsAsFactors=FALSE)
### Alternatively, uncomment and run if working locally with full git
### tree
###
### Identify data source directory and file
## DataDir <- ("../data/")
## DataFile <- ("dailyviews2020032600.tsv")
## views <- read.table(paste(DataDir, DataFile, sep=""),
##                     sep="\t", header=TRUE,
##                     stringsAsFactors=FALSE)
### Cleanup and do the grouping with functions from the Tidyverse
### (see https://www.tidyverse.org for more info)
views <- views[,c("article", "project", "timestamp", "views")]
views$timestamp <- fct_explicit_na(views$timestamp)
### Sorts and groups at the same time
views.by.proj.date <- arrange(group_by(views, project, timestamp),
desc(views))
### Export just the top 10 by pageviews
write.table(head(views.by.proj.date, 10),
file="output/top10_views_by_project_date.csv", sep=",",
row.names=FALSE)
### A simple visualization
p <- ggplot(data=views.by.proj.date, aes(views))
## Density plot with log-transformed axis
p + geom_density() + scale_x_log10(labels=comma)
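
The script displays the density plot but never writes it to disk. A minimal sketch of saving the figure next to the CSV export, assuming ggsave() from ggplot2 and an existing output/ directory; the file name and dimensions are illustrative, not part of this commit:

## Assumed addition, not in the committed script: save the log-scaled
## density plot alongside the CSV output in output/.
ggsave("output/views_density_by_project_date.png",
       plot = p + geom_density() + scale_x_log10(labels=comma),
       width = 6, height = 4)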