Refactor and reorganize.
This commit is contained in:
33
dumps/check_comments_shas.py
Executable file
33
dumps/check_comments_shas.py
Executable file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/env python3
# run from a build_machine

"""Check sha256 checksums of the downloaded pushshift reddit comment dumps.

Fetches the published checksum lists (monthly and daily dumps), then hashes
each local file under ``dumpdir`` and reports whether it matches. Files that
have not been downloaded yet are skipped with a message.
"""

import requests
from os import path
import hashlib

# Published checksum lists: monthly dumps plus the daily dumps used from 2020 on.
shasums1 = requests.get("https://files.pushshift.io/reddit/comments/sha256sum.txt").text
shasums2 = requests.get("https://files.pushshift.io/reddit/comments/daily/sha256sum.txt").text

shasums = shasums1 + shasums2
dumpdir = "/gscratch/comdata/raw_data/reddit_dumps/comments"

for l in shasums.strip().split('\n'):
    sha256_hash = hashlib.sha256()
    # Each line is "<sha256>  <filename>"; split on spaces and take the ends
    # so the variable-width gap between the two fields doesn't matter.
    parts = l.split(' ')
    correct_sha256 = parts[0]
    filename = parts[-1]
    print(f"checking {filename}")
    fpath = path.join(dumpdir, filename)
    if path.isfile(fpath):
        # Hash in 4 KiB chunks so multi-GB dumps don't need to fit in memory.
        with open(fpath, 'rb') as f:
            for byte_block in iter(lambda: f.read(4096), b""):
                sha256_hash.update(byte_block)

        if sha256_hash.hexdigest() == correct_sha256:
            print(f"{filename} checks out")
        else:
            print(f"ERROR! {filename} has the wrong hash. Redownload and recheck!")
    else:
        print(f"Skipping {filename} as it doesn't exist")
|
||||
|
||||
31
dumps/check_submission_shas.py
Executable file
31
dumps/check_submission_shas.py
Executable file
@@ -0,0 +1,31 @@
|
||||
#!/usr/bin/env python3
# run from a build_machine

"""Check sha256 checksums of the downloaded pushshift reddit submission dumps.

Fetches the published checksum lists (current and old_v1_data dumps), then
hashes each local file under ``dumpdir`` and reports whether it matches.
Files that have not been downloaded yet are skipped with a message.
"""

import requests
from os import path
import hashlib

# Published checksum lists: current submissions plus the old_v1_data archive.
file1 = requests.get("https://files.pushshift.io/reddit/submissions/sha256sums.txt").text
file2 = requests.get("https://files.pushshift.io/reddit/submissions/old_v1_data/sha256sums.txt").text

dumpdir = "/gscratch/comdata/raw_data/reddit_dumps/submissions"

for l in file1.strip().split('\n') + file2.strip().split('\n'):
    sha256_hash = hashlib.sha256()
    # Each line is "<sha256>  <filename>"; split on spaces and take the ends
    # so the variable-width gap between the two fields doesn't matter.
    parts = l.split(' ')
    correct_sha256 = parts[0]
    filename = parts[-1]
    print(f"checking {filename}")
    fpath = path.join(dumpdir, filename)
    if path.isfile(fpath):
        # Hash in 4 KiB chunks so multi-GB dumps don't need to fit in memory.
        with open(fpath, 'rb') as f:
            for byte_block in iter(lambda: f.read(4096), b""):
                sha256_hash.update(byte_block)

        if sha256_hash.hexdigest() == correct_sha256:
            print(f"{filename} checks out")
        else:
            print(f"ERROR! {filename} has the wrong hash. Redownload and recheck!")
    else:
        print(f"Skipping {filename} as it doesn't exist")
|
||||
|
||||
14
dumps/pull_pushshift_comments.sh
Executable file
14
dumps/pull_pushshift_comments.sh
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/bin/bash
# Mirror the pushshift reddit comment dumps into $output_dir, then verify
# their checksums. -nc (no-clobber) makes the script safe to re-run: files
# already downloaded are skipped.

user_agent='nathante teblunthuis <nathante@uw.edu>'
output_dir='/gscratch/comdata/raw_data/reddit_dumps/comments'
base_url='https://files.pushshift.io/reddit/comments/'

# Quote expansions: $user_agent contains spaces and would otherwise be split
# into multiple wget arguments.
wget -r --no-parent -A 'RC_201*.bz2' -U "$user_agent" -P "$output_dir" -nd -nc "$base_url"
wget -r --no-parent -A 'RC_201*.xz' -U "$user_agent" -P "$output_dir" -nd -nc "$base_url"
wget -r --no-parent -A 'RC_201*.zst' -U "$user_agent" -P "$output_dir" -nd -nc "$base_url"

# starting in 2020 we use daily dumps not monthly dumps
wget -r --no-parent -A 'RC_202*.gz' -U "$user_agent" -P "$output_dir" -nd -nc "$base_url/daily/"

./check_comments_shas.py
|
||||
14
dumps/pull_pushshift_submissions.sh
Executable file
14
dumps/pull_pushshift_submissions.sh
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/bin/bash
# Mirror the pushshift reddit submission dumps (current and old_v1_data)
# into $output_dir, then verify their checksums. -nc (no-clobber) makes the
# script safe to re-run: files already downloaded are skipped.

user_agent='nathante teblunthuis <nathante@uw.edu>'
output_dir='/gscratch/comdata/raw_data/reddit_dumps/submissions'
base_url='https://files.pushshift.io/reddit/submissions/'

# Quote expansions: $user_agent contains spaces and would otherwise be split
# into multiple wget arguments.
wget -r --no-parent -A 'RS_20*.bz2' -U "$user_agent" -P "$output_dir" -nd -nc "$base_url"
wget -r --no-parent -A 'RS_20*.xz' -U "$user_agent" -P "$output_dir" -nd -nc "$base_url"
wget -r --no-parent -A 'RS_20*.zst' -U "$user_agent" -P "$output_dir" -nd -nc "$base_url"
wget -r --no-parent -A 'RS_20*.bz2' -U "$user_agent" -P "$output_dir" -nd -nc "$base_url/old_v1_data/"
wget -r --no-parent -A 'RS_20*.xz' -U "$user_agent" -P "$output_dir" -nd -nc "$base_url/old_v1_data/"
wget -r --no-parent -A 'RS_20*.zst' -U "$user_agent" -P "$output_dir" -nd -nc "$base_url/old_v1_data/"

./check_submission_shas.py
|
||||
Reference in New Issue
Block a user