updating R scripts for commit analysis c2
This commit is contained in:
parent
3ac2d43a28
commit
752d4da9d5
@@ -3,8 +3,26 @@ library(dplyr)
|
||||
library(lubridate)
library(tidyr)
library(purrr)
library(stringr)

# Input commit history for case 2 of the repo-lifecycle study.
# The wmfconfig path is retained for reference; the core-commits CSV is
# the active input (the original assigned both, leaving the first a dead store).
#ve_commit_fp <- "/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case2/mediawiki_wmfconfig_commits.csv"
ve_commit_fp <- "/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case2/mediawiki_core_commits.csv"
# Return TRUE when `text` (a single commit message, possibly NA) mentions
# "http"/"https" outside of a URL:
#   - a bare word "http" or "https", or
#   - any word containing "http" that does not start with "http://" or "https://".
# NA messages count as no mention (FALSE).
#
# Fixes vs. the original:
#   - dropped the redundant third condition `!str_detect(word, "^http?://")`:
#     `^http?://` (optional "p") only additionally matches words starting
#     "htt://", and `^https?://` already covers every real URL prefix here;
#   - uses base `strsplit`/`grepl` instead of stringr equivalents (identical
#     semantics for these patterns, no extra dependency for this helper).
contains_http_but_not_url <- function(text) {
  if (is.na(text)) {
    return(FALSE)
  }
  # Split the message on runs of whitespace and inspect each token.
  words <- strsplit(text, "\\s+")[[1]]
  for (word in words) {
    # Bare protocol mention.
    if (word == "http" || word == "https") {
      return(TRUE)
    }
    # Contains "http" but is not a URL (does not start with http:// or https://).
    if (grepl("http", word, fixed = TRUE) && !grepl("^https?://", word)) {
      return(TRUE)
    }
  }
  FALSE
}
transform_commit_data <- function(filepath){
|
||||
#basic, loading in the file
|
||||
@@ -164,8 +182,169 @@ transform_commit_data <- function(filepath){
|
||||
return(weekly_commits)
|
||||
}
|
||||
|
||||
# NOTE(review): both of these values are overwritten further down in this
# script (by transform_relevant_commit_data and the relevant_event output
# path) before anything is written, so as-is this pair is dead work — it only
# matters if the script is run selectively up to this point. Confirm whether
# a write.csv() for this output was dropped, or remove the pair.
transformed <- transform_commit_data(ve_commit_fp)
output_filepath <-"/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case2/event_0403_mediawiki_wmfconfig_weekly_commit_count_data.csv"
# Build a weekly panel of "relevant" commits (messages mentioning http outside
# a URL) around an event date.
#
# Arguments:
#   filepath           — commit-level CSV; expected columns include
#                        commit_date, message, author_email, committer_email
#                        (schema inferred from usage — confirm against input).
#   event_date         — reference date; weeks are indexed relative to it.
#                        Default preserves the original hard-coded value.
#   present_date       — "now" used to compute project age in days.
#   author_cutoff_date — authors with >5 commits before this date count as
#                        established; everyone else is "new".
#
# Returns: one row per (project_id, relative_week, age) for weeks -52..52,
# with commit counts, affiliation breakdowns, new-author counts, zero-filled
# weeks, and a before/after indicator.
#
# Changes vs. the original:
#   - unused locals (temp_df, dir_path) removed;
#   - sapply + if_else(x, TRUE, FALSE) replaced by vapply (type-stable, same
#     result) and filter(mentions_http == TRUE) by filter(mentions_http);
#   - the nine chained replace_na() calls merged into one; the entries for
#     l10n_commit_count and jenkins_commit_count were dropped because those
#     columns are never created by the summarise above;
#   - magic dates lifted into defaulted parameters (backward compatible);
#   - large superseded commented-out authorship-tracking blocks removed.
transform_relevant_commit_data <- function(filepath,
                                           event_date = as.Date("2013-08-28"),
                                           present_date = as.Date("2025-02-10"),
                                           author_cutoff_date = as.Date("2013-08-01")) {
  # Load the commit-level data.
  df <- read.csv(filepath, header = TRUE)

  # Project id is the file-name prefix before the first underscore.
  file_name <- basename(filepath)
  project_id <- sub("_.*$", "", file_name)

  # Parse timestamps and tag every row with the project id.
  df <- df |>
    mutate(commit_date = ymd_hms(commit_date)) |>
    mutate(project_id = project_id)

  # Project age in days from the oldest commit to `present_date`.
  oldest_commit_date <- min(as.Date(df$commit_date))
  project_age <- as.numeric(present_date - oldest_commit_date)
  df <- df |>
    mutate(age = project_age)

  # Observation window: 12 months either side of the event, with the start
  # clamped to the beginning of the project's history.
  calculated_start_date <- event_date %m-% months(12)
  start_date <- max(calculated_start_date, oldest_commit_date)
  end_date <- event_date %m+% months(12)

  # Signed week index of `date` relative to `ref_date` (floor division).
  relative_week <- function(date, ref_date) {
    as.integer(as.numeric(difftime(date, ref_date, units = "days")) %/% 7)
  }

  # Keep only commits whose message mentions http outside of a URL.
  df <- df |>
    mutate(relative_week = relative_week(commit_date, event_date)) |>
    mutate(mentions_http = vapply(message, contains_http_but_not_url, logical(1))) |>
    filter(mentions_http)

  # Established ("old") authors: >5 commits before the cutoff date.
  old_author_list <- df |>
    filter(commit_date < author_cutoff_date) |>
    group_by(author_email) |>
    summarise(commit_count = n()) |>
    filter(commit_count > 5) |>
    pull(author_email)

  # Label everyone not in the old-author list as new, split by WMF
  # affiliation; known bots are excluded from the unaffiliated-new count.
  df <- df |>
    mutate(new_author = ifelse(author_email %in% old_author_list, 0, 1),
           new_author_wmf = if_else(grepl("@wikimedia", author_email),
                                    new_author, 0),
           new_author_unaff = if_else(!grepl("@wikimedia", author_email) &
                                        !grepl("l10n-bot@translatewiki.net|tools.libraryupgrader@tools.wmflabs.org", author_email),
                                      new_author, 0))

  # Scaffold of every week in the window so commit-free weeks appear as zeros.
  all_weeks <- seq(relative_week(start_date, event_date), relative_week(end_date, event_date))
  complete_weeks_df <- expand.grid(relative_week = all_weeks,
                                   project_id = project_id,
                                   age = project_age)

  # Drop CI commits before aggregating.
  df <- df |>
    filter(author_email != "jenkins-bot@gerrit.wikimedia.org")

  # Aggregate to the weekly panel: counts and affiliation breakdowns, joined
  # against the full week scaffold, with missing weeks zero-filled.
  weekly_commits <- df |>
    group_by(project_id, relative_week, age) |>
    summarise(commit_count = n(),
              author_emails = list(unique(author_email)),
              committer_emails = list(unique(committer_email)),
              mediawiki_dev_commit_count = sum(grepl("@users.mediawiki.org", author_email)),
              wikimedia_commit_count = sum(grepl("@wikimedia", author_email)),
              wikia_commit_count = sum(grepl("@wikia-inc.com", author_email)),
              bot_commit_count = sum(grepl("l10n-bot@translatewiki.net|tools.libraryupgrader@tools.wmflabs.org", author_email)),
              wmf_new_commit_count = sum(new_author_wmf),
              unaff_new_commit_count = sum(new_author_unaff),
              .groups = 'drop') |>
    right_join(complete_weeks_df, by = c("relative_week", "project_id", "age")) |>
    replace_na(list(commit_count = 0,
                    mediawiki_dev_commit_count = 0,
                    wikimedia_commit_count = 0,
                    wikia_commit_count = 0,
                    bot_commit_count = 0,
                    wmf_new_commit_count = 0,
                    unaff_new_commit_count = 0)) |>
    mutate(before_after = if_else(relative_week < 0, 0, 1)) |>
    select(-author_emails, -committer_emails)

  # Trim to +/- 52 weeks around the event.
  weekly_commits <- weekly_commits |>
    filter(relative_week >= (-52) & relative_week <= 52)

  # Gracefully exit.
  return(weekly_commits)
}
|
||||
# Run the relevant-commit transformation on the configured input and persist
# the weekly panel as CSV (no row names).
output_filepath <- "/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case2/relevant_event_0404_mediawiki_core_weekly_commit_count_data.csv"
transformed <- transform_relevant_commit_data(ve_commit_fp)
write.csv(x = transformed, file = output_filepath, row.names = FALSE)
@@ -1,5 +1,5 @@
|
||||
library(tidyverse)

# Weekly panel produced by transform_relevant_commit_data. The earlier,
# non-"relevant" input is kept for reference; the original assigned both,
# leaving the first a dead store that was immediately overwritten.
#count_data_fp <-"/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case2/event_0403_mediawiki_core_weekly_commit_count_data.csv"
count_data_fp <-"/mmfs1/gscratch/comdata/users/mjilg/mw-repo-lifecycles/case2/relevant_event_0404_mediawiki_core_weekly_commit_count_data.csv"
input_df <- read.csv(count_data_fp, header = TRUE)

# Commit count net of known bot activity.
input_df$nonbot_commit_count <- input_df$commit_count - input_df$bot_commit_count
@@ -20,14 +20,14 @@ new_authors <- long_df |>
|
||||
ggplot(aes(x=relative_week,
|
||||
y=lengthened_commit_count,
|
||||
color=factor(commit_type))) +
|
||||
geom_line() +
|
||||
geom_point() +
|
||||
geom_line() +
|
||||
labs(x = "Relative Week", y = "Commits", color="Commit Type") +
|
||||
scale_color_manual(values = affiliationColors,
|
||||
labels = c("nonbot_commit_count" = "Total Nonbot Commits",
|
||||
"unaff_new_commit_count" = "New Unaffiliated Commits",
|
||||
"wmf_new_commit_count" = "New WMF Commits")) +
|
||||
ggtitle("MW-core Commits Around HTTPS as-default ('New' contributors <= 5 commits before 08-01-2013)") +
|
||||
ggtitle("relevant MW-core Commits Around HTTPS as-default ('New' contributors <= 5 commits before 08-01-2013)") +
|
||||
theme_bw() +
|
||||
theme(legend.position = "top")
|
||||
new_authors
|
||||
|
Loading…
Reference in New Issue
Block a user