Migrate to mwpersistence. This fixes many issues. Legacy persistence behavior is preserved behind the new --persistence-legacy flag.
parent 7db6288923
commit d77b0a4965
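For orientation, a minimal sketch of how the two modes are selected, mirroring the subprocess call strings used in the test suite below; the dump path, output option, and wikiq location are placeholders rather than values taken from this commit:

import subprocess

# New mwpersistence-based measures (the -p/--persistence flag).
# The -o option and both paths are placeholders for illustration only.
subprocess.run("./wikiq dump.xml -o output_dir --persistence", shell=True)

# Previous mw.lib.persistence behavior, kept available behind the new flag.
subprocess.run("./wikiq dump.xml -o output_dir --persistence-legacy", shell=True)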
.gitmodules (vendored): 3 lines removed
@@ -1,3 +0,0 @@
-[submodule "Mediawiki-Utilities"]
-    path = Mediawiki-Utilities
-    url = https://github.com/halfak/Mediawiki-Utilities.git
@@ -32,15 +32,20 @@ class Test_Wikipedia(unittest.TestCase):
         self.baseline_output_dir = "baseline_output"

     def test_WP_url_encode(self):
+        test_filename = "url-encode_" + self.wikiq_out_name
+        test_file = os.path.join(self.test_output_dir, test_filename)
+        if os.path.exists(test_file):
+            os.remove(test_file)
+
         call = self.base_call.format(self.input_file, self.test_output_dir)
         call = call + " --url-encode"
         proc = subprocess.Popen(call,stdout=subprocess.PIPE,shell=True)
         proc.wait()
-        test_file = "url-encode_" + self.wikiq_out_name
-        copyfile(self.call_output, os.path.join(self.test_output_dir, test_file))
-        baseline_file = os.path.join(".", self.baseline_output_dir, test_file)
-
-        test_lines = open(os.path.join(self.test_output_dir,test_file))
+        copyfile(self.call_output, test_file)
+        baseline_file = os.path.join(".", self.baseline_output_dir, test_filename)
+
+        test_lines = open(test_file)
         baseline_lines = open(baseline_file)
         for test, baseline in zip(test_lines, baseline_lines):
             self.assertEqual(test,baseline)
@@ -68,15 +73,20 @@ class Test_Basic(unittest.TestCase):

     def test_noargs(self):

+        test_filename = "noargs_" + self.wikiq_out_name
+        test_file = os.path.join(self.test_output_dir, test_filename)
+        if os.path.exists(test_file):
+            os.remove(test_file)
+
         call = self.base_call.format(self.input_file, self.test_output_dir)
         proc = subprocess.Popen(call,stdout=subprocess.PIPE,shell=True)
         proc.wait()
-        test_file = "noargs_" + self.wikiq_out_name
-        copyfile(self.call_output, os.path.join(self.test_output_dir, test_file))
-
-        baseline_file = os.path.join(".", self.baseline_output_dir, test_file)
-
-        test_lines = open(os.path.join(self.test_output_dir,test_file))
+        copyfile(self.call_output, test_file)
+
+        baseline_file = os.path.join(".", self.baseline_output_dir, test_filename)
+
+        test_lines = open(test_file)
         baseline_lines = open(baseline_file)
         for test, baseline in zip(test_lines, baseline_lines):
             self.assertEqual(test, baseline)
@@ -85,18 +95,22 @@ class Test_Basic(unittest.TestCase):
         baseline_lines.close()

     def test_collapse_user(self):
+        test_filename = "collapse-user_" + self.wikiq_out_name
+        test_file = os.path.join(self.test_output_dir, test_filename)
+        if os.path.exists(test_file):
+            os.remove(test_file)
+
         call = self.base_call.format(self.input_file, self.test_output_dir)
         call = call + " --collapse-user"

         proc = subprocess.Popen(call,stdout=subprocess.PIPE,shell=True)
         proc.wait()

-        test_file = "collapse-user_" + self.wikiq_out_name
-        copyfile(self.call_output, os.path.join(self.test_output_dir, test_file))
+        copyfile(self.call_output, test_file)

-        baseline_file = os.path.join(".", self.baseline_output_dir, test_file)
+        baseline_file = os.path.join(".", self.baseline_output_dir, test_filename)

-        test_lines = open(os.path.join(self.test_output_dir,test_file))
+        test_lines = open(test_file)
         baseline_lines = open(baseline_file)
         for test, baseline in zip(test_lines, baseline_lines):
             self.assertEqual(test,baseline)
@@ -104,18 +118,23 @@ class Test_Basic(unittest.TestCase):
         test_lines.close()
         baseline_lines.close()

-    def test_pwr(self):
+    def test_pwr_legacy(self):
+        test_filename = "persistence_legacy_" + self.wikiq_out_name
+        test_file = os.path.join(self.test_output_dir, test_filename)
+        if os.path.exists(test_file):
+            os.remove(test_file)
+
         call = self.base_call.format(self.input_file, self.test_output_dir)
-        call = call + " --persistence"
+        call = call + " --persistence-legacy"
         proc = subprocess.Popen(call,stdout=subprocess.PIPE,shell=True)
         proc.wait()

-        test_file = "persistence_" + self.wikiq_out_name
-        copyfile(self.call_output, os.path.join(self.test_output_dir, test_file))
-
-        baseline_file = os.path.join(".", self.baseline_output_dir, test_file)
-
-        test_lines = open(os.path.join(self.test_output_dir,test_file))
+        copyfile(self.call_output, test_file)
+
+        baseline_file = os.path.join(".", self.baseline_output_dir, test_filename)
+
+        test_lines = open(test_file)
         baseline_lines = open(baseline_file)
         for test, baseline in zip(test_lines, baseline_lines):
             self.assertEqual(test,baseline)
@@ -124,15 +143,21 @@ class Test_Basic(unittest.TestCase):
         baseline_lines.close()

     def test_url_encode(self):
+        test_filename = "url-encode_" + self.wikiq_out_name
+
+        test_file = os.path.join(self.test_output_dir, test_filename)
+        if os.path.exists(test_file):
+            os.remove(test_file)
+
         call = self.base_call.format(self.input_file, self.test_output_dir)
         call = call + " --url-encode"
         proc = subprocess.Popen(call,stdout=subprocess.PIPE,shell=True)
         proc.wait()
-        test_file = "url-encode_" + self.wikiq_out_name
-        copyfile(self.call_output, os.path.join(self.test_output_dir, test_file))
-        baseline_file = os.path.join(".", self.baseline_output_dir, test_file)
-
-        test_lines = open(os.path.join(self.test_output_dir,test_file))
+        copyfile(self.call_output, test_file)
+        baseline_file = os.path.join(".", self.baseline_output_dir, test_filename)
+
+        test_lines = open(test_file)
         baseline_lines = open(baseline_file)
         for test, baseline in zip(test_lines, baseline_lines):
             self.assertEqual(test,baseline)
@@ -192,6 +217,20 @@ class Test_Stdout(unittest.TestCase):
         for test, baseline in zip(test_lines, baseline_lines):
             self.assertEqual(test,baseline)

+    def test_persistence(self):
+
+        call = self.base_call.format(self.input_file) + " --persistence"
+        proc = subprocess.run(call,stdout=subprocess.PIPE,shell=True)
+        outs = proc.stdout.decode('utf-8')
+
+        test_file = "persistence_" + self.wikiq_out_name
+        baseline_file = os.path.join(".", self.baseline_output_dir, test_file)
+
+        test_lines = outs.splitlines(True)
+        baseline_lines = open(baseline_file)
+        for test, baseline in zip(test_lines, baseline_lines):
+            self.assertEqual(test,baseline)
+
+        # test_file = "noargs_" + self.wikiq_out_name
+        # copyfile(self.call_output, os.path.join(self.test_output_dir, test_file))
test/baseline_output/persistence_legacy_sailormoon.tsv (new file): 4652 lines
(File diff suppressed because it is too large)
wikiq: 42 lines changed
@@ -3,7 +3,7 @@
# original wikiq headers are: title articleid revid date_time anon
# editor editor_id minor text_size text_entropy text_md5 reversion
# additions_size deletions_size

import pdb
import argparse
import sys
import os, os.path
@@ -14,16 +14,20 @@ from collections import deque
 from hashlib import sha1

 from mw.xml_dump import Iterator
-from mw.lib import persistence

+from deltas.tokenizers import wikitext_split
+import mwpersistence
+import mwreverts
 from urllib.parse import quote
 TO_ENCODE = ('title', 'editor')
 PERSISTENCE_RADIUS=7
+from deltas import SequenceMatcher

 def calculate_persistence(tokens_added):
     return(sum([(len(x.revisions)-1) for x in tokens_added]),
            len(tokens_added))


 class WikiqIterator():
     def __init__(self, fh, collapse_user=False):
         self.fh = fh
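To make the calculate_persistence helper above concrete, a tiny illustration (not part of this commit; the Token class is a stand-in for the token objects the persistence libraries return, which carry a .revisions list):

# Stand-in token: .revisions lists the revisions in which the token appears.
class Token:
    def __init__(self, revisions):
        self.revisions = revisions

# As defined in the hunk above.
def calculate_persistence(tokens_added):
    return(sum([(len(x.revisions)-1) for x in tokens_added]),
           len(tokens_added))

# Two tokens added in revision 10: one survives revisions 11 and 12, one does not.
tokens_added = [Token([10, 11, 12]), Token([10])]
# Returns (total revisions persisted beyond the first, number of tokens added).
assert calculate_persistence(tokens_added) == (2, 2)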
@@ -101,12 +105,13 @@ class WikiqPage():
 class WikiqParser():


-    def __init__(self, input_file, output_file, collapse_user=False, persist=False, urlencode=False):
+    def __init__(self, input_file, output_file, collapse_user=False, persist=False, urlencode=False, persist_legacy=False):

         self.input_file = input_file
         self.output_file = output_file
         self.collapse_user = collapse_user
         self.persist = persist
+        self.persist_legacy = persist_legacy
         self.printed_header = False
         self.namespaces = []
         self.urlencode = urlencode
@@ -141,13 +146,22 @@ class WikiqParser():

         page_count = 0
         rev_count = 0


         # Iterate through pages
         for page in dump:
-            if self.persist:
-                state = persistence.State()
+            rev_detector = mwreverts.Detector()
+
+            if self.persist or self.persist_legacy:
                 window = deque(maxlen=PERSISTENCE_RADIUS)

-            rev_detector = mwreverts.Detector()
+                if not self.persist_legacy:
+                    state = mwpersistence.DiffState(SequenceMatcher(tokenizer = wikitext_split),
+                                                    revert_radius=PERSISTENCE_RADIUS)
+
+                else:
+                    from mw.lib import persistence
+                    state = persistence.State()

             # Iterate through a page's revisions
             for rev in page:
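For context, a minimal standalone sketch of the mwpersistence pipeline that the non-legacy branch above sets up (illustrative only, not code from this commit; the revision ids and texts are invented):

import mwpersistence
from deltas import SequenceMatcher
from deltas.tokenizers import wikitext_split

# Same construction as the non-legacy branch above, with the 7-revision revert radius.
state = mwpersistence.DiffState(SequenceMatcher(tokenizer=wikitext_split),
                                revert_radius=7)

for rev_id, text in [(1, "foo bar"), (2, "foo bar baz"), (3, "foo baz")]:
    # update() diffs against the previous state and returns the current tokens
    # plus the tokens added and removed by this revision.
    current_tokens, tokens_added, tokens_removed = state.update(text, rev_id)
    # Each token records the revisions it has appeared in, which is what
    # calculate_persistence() sums over.
    token_revs = sum(len(t.revisions) - 1 for t in tokens_added)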
@@ -181,6 +195,7 @@ class WikiqParser():

                # generate revert data
                revert = rev_detector.process(text_sha1, rev.id)

                if revert:
                    rev_data['revert'] = "TRUE"
                    rev_data['reverteds'] = '"' + ",".join([str(x) for x in revert.reverteds]) + '"'
@@ -211,12 +226,18 @@ class WikiqParser():
                 if self.collapse_user:
                     rev_data['collapsed_revs'] = rev.collapsed_revs

-                if self.persist:
+                if self.persist or self.persist_legacy:
                     if rev.text.deleted:
                         for k in ["token_revs", "tokens_added", "tokens_removed", "tokens_window"]:
                             old_rev_data[k] = None
                     else:
+
+                        if not self.persist_legacy:
+                            _, tokens_added, tokens_removed = state.update(rev.text, rev.id)
+
+                        else:
                             _, tokens_added, tokens_removed = state.process(rev.text, rev.id,text_sha1)

                     window.append((rev.id, rev_data, tokens_added, tokens_removed))

                     if len(window) == PERSISTENCE_RADIUS:
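The deque above exists so that a revision's persistence statistics are only written once PERSISTENCE_RADIUS later revisions have been seen, giving every emitted row the same opportunity window. A small sketch of that buffering pattern (illustrative only, not code from this commit):

from collections import deque

PERSISTENCE_RADIUS = 7
window = deque(maxlen=PERSISTENCE_RADIUS)

for rev_id in range(1, 21):
    window.append(rev_id)
    if len(window) == PERSISTENCE_RADIUS:
        # window[0] is the oldest buffered revision; it has now been followed by
        # PERSISTENCE_RADIUS - 1 newer revisions, so its row can be emitted.
        oldest = window[0]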
@@ -236,7 +257,7 @@ class WikiqParser():

                rev_count += 1

-            if self.persist:
+            if self.persist or self.persist_legacy:
                 # print out metadata for the last RADIUS revisions
                 for i, item in enumerate(window):
                     # if the window was full, we've already printed item 0
@@ -316,6 +337,9 @@ parser.add_argument('-p', '--persistence', dest="persist", action="store_true",
 parser.add_argument('-u', '--url-encode', dest="urlencode", action="store_true",
                     help="Output url encoded text strings. This works around some data issues like newlines in editor names. In the future it may be used to output other text data.")

+parser.add_argument('--persistence-legacy', dest="persist_legacy", action="store_true",
+                    help="Legacy behavior for persistence calculation. Output url encoded text strings. This works around some data issues like newlines in editor names. In the future it may be used to output other text data.")
+
 args = parser.parse_args()

 if len(args.dumpfiles) > 0:
@@ -339,6 +363,7 @@ if len(args.dumpfiles) > 0:
         wikiq = WikiqParser(input_file, output_file,
                             collapse_user=args.collapse_user,
                             persist=args.persist,
+                            persist_legacy=args.persist_legacy,
                             urlencode=args.urlencode)


@@ -351,6 +376,7 @@ else:
     wikiq = WikiqParser(sys.stdin, sys.stdout,
                         collapse_user=args.collapse_user,
                         persist=args.persist,
+                        persist_legacy=args.persist_legacy,
                         urlencode=args.urlencode)
     wikiq.process()
