
updating case studies for comparative analysis

This commit is contained in:
Matthew Gaughan 2025-04-30 17:39:32 -07:00
parent 80c6e2ffba
commit b9b93fddc1
17 changed files with 532 additions and 322 deletions

View File

@@ -122,3 +122,32 @@ cd final_data
ls
cd metadata
ls
cd ..
cd dkdsjlksdfjlkjlksdfjlkfsdjlklll
cd ..
ls
cd ..
ls
cd . .
cd ..
ls
cd mw-repo-lifecycles
ls
ls case3
ls
cd ..
ls
cd case4
cd case3
ls
mkdir 043025-stale-data
mv event_0415_mediawiki_core_weekly_commit_count_data.csv 043025-stale-data/
mv event_0422_mediawiki_core_weekly_count.csv 043025-stale-data/
ls
cd ..
ls
cd case1`
cd case1
lds
ls
cd ../case2
ls

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.5 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.4 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 734 KiB

View File

@@ -5,7 +5,7 @@ library(tidyr)
library(purrr)
library(stringr)
https_commit_fp <- "/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case1/extensions_visualeditor_commits.csv"
https_commit_fp <- "/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case2/mediawiki_core_commits.csv"
contains_http_but_not_url <- function(text) {
if (is.na(text)) {
@@ -28,7 +28,7 @@ contains_http_but_not_url <- function(text) {
if (str_detect(word, "tls")){
return(TRUE)
}
if (startsWith(word, "cert")){
if (startsWith(word, "cert") && !startsWith(word, "certain")){
return(TRUE)
}
}
@@ -207,7 +207,7 @@ transform_relevant_commit_data <- function(filepath){
# TODO: this is project/event specific
event_date <- as.Date("2015-07-02")
event_date <- as.Date("2013-08-28")
#event_date <- as.Date("2013-07-01")
#event_date <- as.Date("2013-04-25")
#event_date <- as.Date("2012-12-11")
@@ -231,13 +231,13 @@ transform_relevant_commit_data <- function(filepath){
#drop out data from ''before'' the release process
df <- df |>
filter(commit_date >= as.Date("2015-04-01"))
filter(commit_date >= as.Date("2011-09-03"))
#we are looking at weekly data, 6m before and 6m after
#start_date <- event_date %m-% months(6)
calculated_start_date <- event_date %m-% months(12)
start_date <- max(calculated_start_date, oldest_commit_date)
end_date <- event_date %m+% months(12)
#calculated_start_date <- event_date %m-% months(12)
start_date <- as.Date('2011-09-03')
end_date <-as.Date('2013-11-27')
#getting the relative weeks to the publication date
relative_week <- function(date, ref_date) {
@@ -260,7 +260,7 @@ transform_relevant_commit_data <- function(filepath){
# list all author_emails with >5 commits
# for big df: if author not in the list, 'new' author
old_author_list <- df |>
filter(commit_date < as.Date("2015-06-01"))|>
filter(commit_date > as.Date("2011-09-03") & commit_date < as.Date("2013-08-01"))|>
group_by(author_email) |>
summarise(commit_count = n()) |>
filter(commit_count > 5) |>
@@ -296,7 +296,7 @@ transform_relevant_commit_data <- function(filepath){
#now cut out the commit data that we don't care about
df <- df |>
filter(author_email != "jenkins-bot@gerrit.wikimedia.org")
filter(author_email != "jenkins-bot@gerrit.wikimedia.org")
#in order:
# - we group by project, week, ages
@@ -357,14 +357,14 @@ transform_relevant_commit_data <- function(filepath){
# ) |>
weekly_commits <- weekly_commits |>
filter(relative_week >= (-14) & relative_week <= 52 )
filter(relative_week >= (-103) & relative_week <= 13 )
#gracefully exit
return(weekly_commits)
}
transformed <- transform_commit_data(https_commit_fp)
output_filepath <-"/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case1/event_0421_extensions_ve_weekly_count.csv"
transformed <- transform_relevant_commit_data(https_commit_fp)
output_filepath <-"/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case2/event_0430_mediawiki_core_weekly_count.csv"
write.csv(transformed, output_filepath, row.names = FALSE)

View File

@@ -1,5 +1,5 @@
library(tidyverse)
count_data_fp <-"/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case1/event_0421_extensions_ve_weekly_count.csv"
count_data_fp <-"/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case2/event_0430_mediawiki_core_weekly_count.csv"
input_df <- read.csv(count_data_fp, header = TRUE)
input_df$nonbot_commit_count <- input_df$commit_count - input_df$bot_commit_count
@@ -9,31 +9,29 @@ library(scales)
library(ggplot2)
long_df <- input_df |>
tidyr::pivot_longer(cols = c(nonbot_commit_count, unaff_commit_count, wikimedia_commit_count),
tidyr::pivot_longer(cols = c(unaff_commit_count, wikimedia_commit_count),
names_to = "commit_type",
values_to = "lengthened_commit_count")
affiliationColors <-
setNames( c('black','#5da2d8', '#c7756a')
,c("nonbot_commit_count","unaff_commit_count", "wikimedia_commit_count"))
setNames( c('#5da2d8', '#c7756a')
,c("unaff_commit_count", "wikimedia_commit_count"))
commit_authors <- long_df |>
ggplot(aes(x=relative_week,
y=lengthened_commit_count,
color=factor(commit_type))) +
geom_point() +
geom_line() +
labs(x = "Relative Week", y = "Commits", color="Commit Type") +
scale_color_manual(values = affiliationColors,
labels = c("nonbot_commit_count" = "Total Nonbot Commits",
"unaff_commit_count" = "Unaffiliated Commits",
fill=factor(commit_type))) +
geom_col(position='dodge') +
labs(x = "Relative Week", y = "Commits", fill="Commit Type") +
scale_fill_manual(values = affiliationColors,
labels = c("unaff_commit_count" = "Unaffiliated Commits",
"wikimedia_commit_count" = "WMF Commits")) +
ggtitle("Total VE Commits Around Opt-out Deployment By Affiliation") +
ggtitle("Total Relevant Core Commits Around HTTPS-as-default Deployment") +
theme_bw() +
theme(legend.position = "top")
commit_authors
ggsave(filename = "0421-ve-commits.png", plot = commit_authors, width = 12, height = 9, dpi = 800)
ggsave(filename = "ww-c2-0430-commits.png", plot = commit_authors, width = 12, height = 9, dpi = 800)
# new affiliation things
@@ -52,23 +50,21 @@ new_authors_long_df <- input_df |>
new_unaff_authors <- new_authors_long_df |>
ggplot(aes(x=relative_week,
y=lengthened_commit_count,
linetype=commit_seniority)) +
geom_point(color = '#5da2d8') +
geom_line(color='#5da2d8') +
labs(x = "Relative Week", y = "Commits", linetype="Commit Seniority ('New' contributors <= 5 commits before 06-06-2013)") +
scale_linetype_manual(
values = c("returning_unaff_commit_count" = "solid",
"unaff_new_commit_count" = "dotted"),
fill=commit_seniority)) +
geom_col(color = '#5da2d8', position='dodge') +
labs(x = "Relative Week", y = "Commits", fill="Commit Seniority ('New' contributors <= 5 commits before 06-06-2013)") +
scale_fill_manual(values = c("returning_unaff_commit_count" = "#FFC107", # Color for "Returning Contributors"
"unaff_new_commit_count" = "#004D40"),
labels = c("returning_unaff_commit_count" = "Returning Contributors",
"unaff_new_commit_count" = "New Contributors")
) +
ggtitle("Unaffiliated VE Commits Surrounding Opt-out Deployment") +
ggtitle("Unaffiliated Relevant Core Commits Around HTTPS-as-default Deployment") +
theme_bw() +
theme(legend.position = "top")
new_unaff_authors
ggsave(filename = "0421-ve-spike-commits.png", plot = new_unaff_authors, width = 12, height = 9, dpi = 800)
ggsave(filename = "ww-c2-0430-unaff-commit-spike.png", plot = new_unaff_authors, width = 12, height = 9, dpi = 800)
new_authors <- long_df |>
ggplot(aes(x=relative_week,

View File

@@ -0,0 +1,206 @@
#library(tidyverse)
library(dplyr)
library(lubridate)
library(tidyr)
library(purrr)
library(stringr)
library(ggplot2)
https_commit_fp <- "/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case2/mediawiki_core_commits.csv"
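# helper: flag commit messages that mention HTTPS-related work (http, login,
# ssl, tls, cert*) in words that are not part of a URL, so bare links alone
# do not count as relevant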
contains_http_but_not_url <- function(text) {
if (is.na(text)) {
return(FALSE)
}
# Split text by whitespace and check each word
words <- str_split(text, "\\s+")[[1]]
for (word in words) {
if (!str_detect(word,"://")){
#http
if (str_detect(word, "http")){
return(TRUE)
}
if (str_detect(word, "login")){
return(TRUE)
}
if (str_detect(word, "ssl")){
return(TRUE)
}
if (str_detect(word, "tls")){
return(TRUE)
}
if (startsWith(word, "cert") && !startsWith(word, "certain")){
return(TRUE)
}
}
}
return(FALSE)
}
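# read the commit CSV, restrict it to the study window starting 2011-09-03,
# flag new vs. returning authors and HTTPS-relevant messages, then roll the
# data up into weekly per-project counts relative to the 2013-08-28 event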
transform_relevant_commit_data <- function(filepath){
#basic, loading in the file
df = read.csv(filepath, header = TRUE)
temp_df <- df
dir_path = dirname(filepath)
file_name = basename(filepath)
# TODO: this is project/event specific
event_date <- as.Date("2013-08-28")
#event_date <- as.Date("2013-07-01")
#event_date <- as.Date("2013-04-25")
#event_date <- as.Date("2012-12-11")
# isolate project id
project_id <- sub("_.*$", "", file_name)
#make sure the dates are formatted correctly and state the project_id
df <- df |>
mutate(commit_date = ymd_hms(commit_date)) |>
mutate(project_id = project_id)
#get information about project age either in the "present"
#or at the time of first commit
oldest_commit_date <- min(as.Date(df$commit_date))
project_age <- as.numeric(as.Date("2025-02-10") - oldest_commit_date)
#add that to the data
df <- df |>
mutate(age = project_age)
#drop out data from ''before'' the release process
df <- df |>
filter(commit_date >= as.Date("2011-09-03"))
#we are looking at weekly data, 6m before and 6m after
#start_date <- event_date %m-% months(6)
#calculated_start_date <- event_date %m-% months(12)
start_date <- as.Date('2011-09-03')
end_date <-as.Date('2015-10-03')
#getting the relative weeks to the publication date
relative_week <- function(date, ref_date) {
as.integer(as.numeric(difftime(date, ref_date, units = "days")) %/% 7)
}
df <- df |>
mutate(relative_week = relative_week(commit_date, event_date))|>
mutate(mentions_http = if_else(sapply(message, contains_http_but_not_url), TRUE, FALSE))
# arrange(relative_week) |>
# group_by(author_email) |>
# mutate(new_author = ifelse(row_number() <= 5, 1, 0),
# new_author_wmf = if_else(grepl("@wikimedia", author_email), new_author, 0),
# new_author_unaff = if_else(!grepl("@wikimedia", author_email), new_author, 0)) |>
# ungroup()
# cut the df to all before 06-01-2015
# group by author_email
# list all author_emails with >5 commits
# for big df: if author not in the list, 'new' author
old_author_list <- df |>
filter(commit_date > as.Date("2011-09-03") & commit_date < as.Date("2013-08-01"))|>
group_by(author_email) |>
summarise(commit_count = n()) |>
filter(commit_count > 5) |>
pull(author_email)
# Label authors as 'new' if they are not in the old_author_list
df <- df |>
mutate(new_author = ifelse(author_email %in% old_author_list, 0, 1),
new_author_wmf = if_else(grepl("@wikimedia", author_email),
new_author, 0),
new_author_unaff = if_else(!grepl("@wikimedia", author_email) &
!grepl("l10n-bot@translatewiki.net|tools.libraryupgrader@tools.wmflabs.org", author_email),
new_author, 0))
#filler for when there are weeks without commits
all_weeks <- seq(relative_week(start_date, event_date), relative_week(end_date, event_date))
complete_weeks_df <- expand.grid(relative_week = all_weeks,
project_id = project_id,
age = project_age)
#for each week, get the list of unique authors that committed
#cumulative_authors <- df %>%
# arrange(relative_week) %>%
# group_by(relative_week) %>%
# summarize(cumulative_author_emails = list(unique(author_email)), .groups = 'drop')
#same for each committer
#cumulative_committers <- df %>%
# arrange(relative_week) %>%
# group_by(relative_week) %>%
# summarize(cumulative_committer_emails = list(unique(committer_email)), .groups = 'drop')
#now cut out the commit data that we don't care about
df <- df |>
filter(author_email != "jenkins-bot@gerrit.wikimedia.org") |>
filter(author_email != "l10n-bot@translatewiki.net") |>
filter(author_email != "tools.libraryupgrader@tools.wmflabs.org")
#in order:
# - we group by project, week, ages
# - and we summarize commit and authorship details
# - we then fill in information for missingness
# - and add in vars for before/after
# - and weekly index
weekly_commits <- df |>
group_by(project_id, relative_week, age) |>
summarise(commit_count = n(),
author_emails = list(unique(author_email)),
committer_emails = list(unique(committer_email)),
mediawiki_dev_commit_count = sum(grepl("@users.mediawiki.org", author_email)),
wikimedia_commit_count = sum(grepl("@wikimedia", author_email)),
wikia_commit_count = sum(grepl("@wikia-inc.com", author_email)),
bot_commit_count = sum(grepl("l10n-bot@translatewiki.net|tools.libraryupgrader@tools.wmflabs.org", author_email)),
wmf_new_commit_count = sum(new_author_wmf),
unaff_new_commit_count = sum(new_author_unaff),
relevant_commits = sum(mentions_http),
.groups = 'drop') |>
right_join(complete_weeks_df, by=c("relative_week", "project_id", "age")) |>
replace_na(list(commit_count = 0)) |>
replace_na(list(wikimedia_commit_count = 0)) |>
replace_na(list(l10n_commit_count = 0)) |>
replace_na(list(jenkins_commit_count = 0)) |>
replace_na(list(mediawiki_dev_commit_count = 0)) |>
replace_na(list(wikia_commit_count = 0)) |>
replace_na(list(bot_commit_count = 0)) |>
replace_na(list(wmf_new_commit_count = 0)) |>
replace_na(list(unaff_new_commit_count = 0)) |>
replace_na(list(relevant_commits = 0)) |>
mutate(before_after = if_else(relative_week < 0, 0, 1)) |>
select(-author_emails, -committer_emails)
weekly_commits <- weekly_commits |>
filter(relative_week >= (-103) & relative_week <= 109 )
#gracefully exit
return(weekly_commits)
}
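# build the weekly table, split counts into HTTPS-relevant vs. irrelevant
# commits, and plot both series side by side across the study window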
transformed <- transform_relevant_commit_data(https_commit_fp)
transformed$irrelevant_commit = transformed$commit_count - transformed$relevant_commits
long_df <- transformed|>
tidyr::pivot_longer(cols = c(irrelevant_commit, relevant_commits),
names_to = "commit_relevance",
values_to = "lengthened_commit_count")
relevant_https_commits <- long_df |>
ggplot(aes(x=relative_week,
y=lengthened_commit_count,
fill=commit_relevance)) +
geom_col(position = "dodge", width = 0.7) +
labs(x = "Relative Week", y = "Commits", fill="Commit relevance") +
scale_fill_manual(values = c("irrelevant_commit" = "#E1BE6A", # Color for "Irrelevant Commits"
"relevant_commits" = "#40B0A6"),
labels = c("irrelevant_commit" = "Irrelevant Commits",
"relevant_commits" = "Relevant Commits")
) +
ggtitle("Commits to MW-Core, 09-03-2011 to 10-03-2015, by relevance to HTTP/s feature deployments") +
theme_bw() +
theme(legend.position = "top")
relevant_https_commits
ggsave(filename = "ww-c2c3-relevance-viz.png", plot = relevant_https_commits, width = 12, height = 9, dpi = 800)

View File

@@ -1,17 +0,0 @@
1. SSH tunnel from your workstation using the following command:
ssh -N -L 8787:n3439:46227 mjilg@klone.hyak.uw.edu
and point your web browser to http://localhost:8787
2. log in to RStudio Server using the following credentials:
user: mjilg
password: AJ9ua2VJPYQLsa6g6Fbq
When done using RStudio Server, terminate the job by:
1. Exit the RStudio Session ("power" button in the top right corner of the RStudio window)
2. Issue the following command on the login node:
scancel -f 25494157

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

BIN
ww-c2c3-relevance-viz.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 420 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 377 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 420 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 372 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 413 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 370 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 406 KiB