Migrate to mwxml. This completes the migration away from python-mediawiki-utilities. Except for preserving the legacy persistence behavior, we can now safely use the nice updates from the mediawiki-utilities project.
This commit is contained in:
parent d77b0a4965
commit dba793c6ac
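For orientation, here is a minimal sketch (not part of the commit) of the API shift the diff below makes, using only calls that appear in the changed code; "dump.xml" is a hypothetical placeholder path. The old python-mediawiki-utilities Iterator is replaced by mwxml.Dump, and per-revision attributes move from rev.contributor.* and rev.text.deleted to rev.user.* and rev.deleted.*.

# Old API (python-mediawiki-utilities), removed below:
#   from mw.xml_dump import Iterator
#   dump = Iterator.from_file(open("dump.xml"))
#   ... rev.contributor.user_text, rev.contributor.id, rev.text.deleted ...

# New API (mwxml), added below; "dump.xml" is a hypothetical local dump file.
from mwxml import Dump

dump = Dump.from_file(open("dump.xml"))
namespaces = {ns.name: ns.id for ns in dump.site_info.namespaces}

for page in dump:
    for rev in page:
        if rev.deleted.text:          # deletion flags now live under rev.deleted
            continue
        print(rev.id, rev.user.text)  # contributor fields now live under rev.user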
@@ -1,9 +1,10 @@
import unittest
import os
import sys
import subprocess
from shutil import copyfile
import pdb
import pandas as pd
from pandas.util.testing import assert_frame_equal
from io import StringIO

# with / without pwr DONE
# with / without url encode DONE
@@ -45,13 +46,10 @@ class Test_Wikipedia(unittest.TestCase):
        copyfile(self.call_output, test_file)
        baseline_file = os.path.join(".", self.baseline_output_dir, test_filename)

        test_lines = open(test_file)
        baseline_lines = open(baseline_file)
        for test, baseline in zip(test_lines, baseline_lines):
            self.assertEqual(test,baseline)

        test_lines.close()
        baseline_lines.close()
        # as a test let's make sure that we get equal data frames
        test = pd.read_table(test_file)
        baseline = pd.read_table(baseline_file)
        assert_frame_equal(test,baseline)


class Test_Basic(unittest.TestCase):
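The tests deliberately keep two checks side by side: the line-by-line loop catches byte-level drift in the TSV output, while the DataFrame comparison confirms the parsed values still agree. A minimal sketch of the pandas half, with hypothetical file paths:

import pandas as pd
from pandas.util.testing import assert_frame_equal

# Hypothetical output and baseline paths.
test = pd.read_table("test_output/wikiq_test.tsv")
baseline = pd.read_table("baseline_output/wikiq_test.tsv")
assert_frame_equal(test, baseline)  # raises AssertionError on any mismatch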
@@ -86,13 +84,10 @@ class Test_Basic(unittest.TestCase):

        baseline_file = os.path.join(".", self.baseline_output_dir, test_filename)

        test_lines = open(test_file)
        baseline_lines = open(baseline_file)
        for test, baseline in zip(test_lines, baseline_lines):
            self.assertEqual(test, baseline)
        test = pd.read_table(test_file)
        baseline = pd.read_table(baseline_file)
        assert_frame_equal(test,baseline)

        test_lines.close()
        baseline_lines.close()

    def test_collapse_user(self):
        test_filename = "collapse-user_" + self.wikiq_out_name
@@ -109,14 +104,9 @@ class Test_Basic(unittest.TestCase):
        copyfile(self.call_output, test_file)

        baseline_file = os.path.join(".", self.baseline_output_dir, test_filename)

        test_lines = open(test_file)
        baseline_lines = open(baseline_file)
        for test, baseline in zip(test_lines, baseline_lines):
            self.assertEqual(test,baseline)

        test_lines.close()
        baseline_lines.close()
        test = pd.read_table(test_file)
        baseline = pd.read_table(baseline_file)
        assert_frame_equal(test,baseline)

    def test_pwr_legacy(self):
        test_filename = "persistence_legacy_" + self.wikiq_out_name
@@ -134,13 +124,30 @@ class Test_Basic(unittest.TestCase):

        baseline_file = os.path.join(".", self.baseline_output_dir, test_filename)

        test_lines = open(test_file)
        baseline_lines = open(baseline_file)
        for test, baseline in zip(test_lines, baseline_lines):
            self.assertEqual(test,baseline)
        test = pd.read_table(test_file)
        baseline = pd.read_table(baseline_file)
        assert_frame_equal(test,baseline)

    def test_pwr(self):
        test_filename = "persistence_" + self.wikiq_out_name
        test_file = os.path.join(self.test_output_dir, test_filename)
        if os.path.exists(test_file):
            os.remove(test_file)

        call = self.base_call.format(self.input_file, self.test_output_dir)
        call = call + " --persistence"
        proc = subprocess.Popen(call,stdout=subprocess.PIPE,shell=True)
        proc.wait()


        copyfile(self.call_output, test_file)

        baseline_file = os.path.join(".", self.baseline_output_dir, test_filename)

        test = pd.read_table(test_file)
        baseline = pd.read_table(baseline_file)
        assert_frame_equal(test,baseline)

        test_lines.close()
        baseline_lines.close()

    def test_url_encode(self):
        test_filename = "url-encode_" + self.wikiq_out_name
@@ -156,14 +163,10 @@ class Test_Basic(unittest.TestCase):

        copyfile(self.call_output, test_file)
        baseline_file = os.path.join(".", self.baseline_output_dir, test_filename)
        test = pd.read_table(test_file)
        baseline = pd.read_table(baseline_file)
        assert_frame_equal(test,baseline)

        test_lines = open(test_file)
        baseline_lines = open(baseline_file)
        for test, baseline in zip(test_lines, baseline_lines):
            self.assertEqual(test,baseline)

        test_lines.close()
        baseline_lines.close()

class Test_Malformed(unittest.TestCase):

@@ -207,42 +210,14 @@ class Test_Stdout(unittest.TestCase):

        call = self.base_call.format(self.input_file)
        proc = subprocess.run(call,stdout=subprocess.PIPE,shell=True)
        outs = proc.stdout.decode('utf-8')

        outs = proc.stdout.decode("utf8")

        test_file = "noargs_" + self.wikiq_out_name
        baseline_file = os.path.join(".", self.baseline_output_dir, test_file)

        test_lines = outs.splitlines(True)
        baseline_lines = open(baseline_file)
        for test, baseline in zip(test_lines, baseline_lines):
            self.assertEqual(test,baseline)

    def test_persistence(self):

        call = self.base_call.format(self.input_file) + " --persistence"
        proc = subprocess.run(call,stdout=subprocess.PIPE,shell=True)
        outs = proc.stdout.decode('utf-8')

        test_file = "persistence_" + self.wikiq_out_name
        baseline_file = os.path.join(".", self.baseline_output_dir, test_file)

        test_lines = outs.splitlines(True)
        baseline_lines = open(baseline_file)
        for test, baseline in zip(test_lines, baseline_lines):
            self.assertEqual(test,baseline)

        # test_file = "noargs_" + self.wikiq_out_name
        # copyfile(self.call_output, os.path.join(self.test_output_dir, test_file))

        # baseline_file = os.path.join(".", self.baseline_output_dir, test_file)

        # test_lines = open(os.path.join(self.test_output_dir,test_file))
        # baseline_lines = open(baseline_file)
        # for test, baseline in zip(test_lines, baseline_lines):
        #     self.assertEqual(test,baseline)
        # test_lines.close()
        # baseline_lines.close()

        print(baseline_file)
        test = pd.read_table(StringIO(outs))
        baseline = pd.read_table(baseline_file)
        assert_frame_equal(test,baseline)

if __name__ == '__main__':
    unittest.main()
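Test_Stdout exercises wikiq's stdout mode: the command's output is captured with subprocess.run, decoded, and handed to pandas through StringIO rather than a file. A minimal, self-contained sketch of that capture-and-parse pattern (the shell command here is a stand-in for the real wikiq call):

import subprocess
from io import StringIO
import pandas as pd

# Stand-in command that emits TSV to stdout; the tests run wikiq here instead.
call = "printf 'revid\\tarticleid\\n1\\t10\\n'"
proc = subprocess.run(call, stdout=subprocess.PIPE, shell=True)
outs = proc.stdout.decode("utf8")

frame = pd.read_table(StringIO(outs))  # parse the captured TSV without touching disk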
6 file diffs suppressed because they are too large
43
wikiq
@@ -3,7 +3,6 @@
# original wikiq headers are: title articleid revid date_time anon
# editor editor_id minor text_size text_entropy text_md5 reversion
# additions_size deletions_size
import pdb
import argparse
import sys
import os, os.path
@@ -13,7 +12,7 @@ from subprocess import Popen, PIPE
from collections import deque
from hashlib import sha1

from mw.xml_dump import Iterator
from mwxml import Dump

from deltas.tokenizers import wikitext_split
import mwpersistence
@@ -32,7 +31,7 @@ class WikiqIterator():
    def __init__(self, fh, collapse_user=False):
        self.fh = fh
        self.collapse_user = collapse_user
        self.mwiterator = Iterator.from_file(self.fh)
        self.mwiterator = Dump.from_file(self.fh)
        self.__pages = self.load_pages()

    def load_pages(self):
@@ -80,7 +79,14 @@ class WikiqPage():
            else:
                if self.collapse_user:
                    # yield if this is the last edit in a seq by a user and reset
                    if not rev.contributor.user_text == prev_rev.contributor.user_text:
                    # also yield if we do know who the user is

                    if rev.deleted.user or prev_rev.deleted.user:
                        yield prev_rev
                        collapsed_revs = 1
                        rev.collapsed_revs = collapsed_revs

                    elif not rev.user.text == prev_rev.user.text:
                        yield prev_rev
                        collapsed_revs = 1
                        rev.collapsed_revs = collapsed_revs
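The collapse_user branch folds consecutive revisions by the same editor into one output row; the new rev.deleted.user checks are needed because a suppressed username gives mwxml nothing to compare against, so such runs are flushed immediately. A simplified, self-contained illustration of that behavior (not the real generator):

# Each tuple stands in for a revision: (user_text or None, user-deleted flag).
revs = [("Alice", False), ("Alice", False), (None, True), ("Bob", False)]

collapsed = []
prev = None
count = 0
for user, deleted in revs:
    if prev is not None:
        prev_user, prev_deleted = prev
        # flush the previous run if either username is hidden or the editor changed
        if deleted or prev_deleted or user != prev_user:
            collapsed.append((prev_user, count))
            count = 0
    count += 1
    prev = (user, deleted)
collapsed.append((prev[0], count))  # final run

print(collapsed)  # [('Alice', 2), (None, 1), ('Bob', 1)]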
@@ -93,6 +99,7 @@
                    yield prev_rev

            prev_rev = rev

        # also yield the final time
        yield prev_rev

@@ -142,7 +149,7 @@ class WikiqParser():
        dump = WikiqIterator(self.input_file, collapse_user=self.collapse_user)

        # extract list of namspaces
        self.namespaces = {ns.name : ns.id for ns in dump.mwiterator.namespaces}
        self.namespaces = {ns.name : ns.id for ns in dump.mwiterator.site_info.namespaces}

        page_count = 0
        rev_count = 0
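mwxml nests site metadata under site_info, hence the new dump.mwiterator.site_info.namespaces. When a page record carries no namespace id, __get_namespace_from_title falls back to matching the title prefix against this map; a simplified stand-in for that lookup (not the method itself):

# Infer a namespace id from the title prefix, defaulting to the main namespace.
namespaces = {"": 0, "Talk": 1, "User": 2}   # example subset of a real namespace map

def namespace_from_title(title, namespaces):
    prefix = title.split(":", 1)[0] if ":" in title else ""
    return namespaces.get(prefix, 0)

print(namespace_from_title("Talk:Sandbox", namespaces))  # 1
print(namespace_from_title("Sandbox", namespaces))       # 0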
@@ -169,23 +176,28 @@
                rev_data = {'revid' : rev.id,
                            'date_time' : rev.timestamp.strftime('%Y-%m-%d %H:%M:%S'),
                            'articleid' : page.id,
                            'editor_id' : "" if rev.contributor.id == None else rev.contributor.id,
                            'editor_id' : "" if rev.deleted.user == True or rev.user.id is None else rev.user.id,
                            'title' : '"' + page.title + '"',
                            'namespace' : page.namespace if page.namespace else self.__get_namespace_from_title(page.title),
                            'deleted' : "TRUE" if rev.text.deleted else "FALSE" }
                            'namespace' : page.namespace if page.namespace is not None else self.__get_namespace_from_title(page.title),
                            'deleted' : "TRUE" if rev.deleted.text else "FALSE" }

                # if revisions are deleted, /many/ things will be missing
                if rev.text.deleted:
                if rev.deleted.text:
                    rev_data['text_chars'] = ""
                    rev_data['sha1'] = ""
                    rev_data['revert'] = ""
                    rev_data['reverteds'] = ""

                else:
                    # rev.text can be None if the page has no text
                    if not rev.text:
                        rev.text = ""
                    # if text exists, we'll check for a sha1 and generate one otherwise

                    if rev.sha1:
                        text_sha1 = rev.sha1
                    else:

                        text_sha1 = sha1(bytes(rev.text, "utf8")).hexdigest()

                    rev_data['sha1'] = text_sha1
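If a revision record has no sha1 of its own, the parser hashes the revision text with hashlib so that rev_data['sha1'] is always populated. The fallback in isolation:

from hashlib import sha1

rev_text = "some revision text"                      # stand-in for rev.text
text_sha1 = sha1(bytes(rev_text, "utf8")).hexdigest()
print(text_sha1)                                      # 40-character hex digest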
@@ -206,10 +218,10 @@
                # if the fact that the edit was minor can be hidden, this might be an issue
                rev_data['minor'] = "TRUE" if rev.minor else "FALSE"

                if rev.contributor.user_text:
                if not rev.deleted.user:
                    # wrap user-defined editors in quotes for fread
                    rev_data['editor'] = '"' + rev.contributor.user_text + '"'
                    rev_data['anon'] = "TRUE" if rev.contributor.id == None else "FALSE"
                    rev_data['editor'] = '"' + rev.user.text + '"'
                    rev_data['anon'] = "TRUE" if rev.user.id == None else "FALSE"

                else:
                    rev_data['anon'] = ""
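Under mwxml the editor columns come from rev.user rather than rev.contributor, and the guard changes from "is there a user_text" to "is the user not suppressed" (rev.deleted.user); anonymous editors are still recognized by a missing user id. A condensed restatement of the new branch on plain values:

def editor_fields(user_text, user_id, user_deleted):
    # In the real code these values come from rev.user and rev.deleted.
    fields = {}
    if not user_deleted:
        fields['editor'] = '"' + user_text + '"'                # quoted for fread
        fields['anon'] = "TRUE" if user_id is None else "FALSE"
    else:
        fields['anon'] = ""
    return fields

print(editor_fields("127.0.0.1", None, False))  # {'editor': '"127.0.0.1"', 'anon': 'TRUE'}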
@@ -227,7 +239,8 @@
                    rev_data['collapsed_revs'] = rev.collapsed_revs

                if self.persist or self.persist_legacy:
                    if rev.text.deleted:
                    if rev.deleted.text:

                        for k in ["token_revs", "tokens_added", "tokens_removed", "tokens_window"]:
                            old_rev_data[k] = None
                    else:
@@ -236,7 +249,7 @@
                            _, tokens_added, tokens_removed = state.update(rev.text, rev.id)

                        else:
                            _, tokens_added, tokens_removed = state.process(rev.text, rev.id,text_sha1)
                            _, tokens_added, tokens_removed = state.process(rev.text, rev.id, text_sha1)

                        window.append((rev.id, rev_data, tokens_added, tokens_removed))

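Both persistence modes hand each revision's text to a token-persistence state and get back the tokens added and removed: the mwpersistence path calls state.update(text, revision_id), while the legacy path keeps state.process(text, revision_id, checksum). Pending rows wait in a deque window until later revisions allow persistence to be scored. A schematic of that flow with a fake state object (the real state construction is outside this hunk):

from collections import deque

class FakeState:
    # Stand-in for the persistence state; returns dummy token lists.
    def update(self, text, revision):
        return None, text.split(), []   # (_, tokens_added, tokens_removed)

state = FakeState()
window = deque()

for rev_id, text in [(1, "a b"), (2, "a b c")]:
    _, tokens_added, tokens_removed = state.update(text, rev_id)
    window.append((rev_id, {}, tokens_added, tokens_removed))

print([w[0] for w in window])   # [1, 2]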
@@ -284,7 +297,7 @@
                if self.urlencode:
                    for field in TO_ENCODE:
                        rev_data[field] = quote(str(rev_data[field]))


                if not self.printed_header:
                    print("\t".join([str(k) for k in sorted(rev_data.keys())]), file=self.output_file)
                    self.printed_header = True
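With URL encoding enabled, the fields listed in TO_ENCODE are percent-encoded with urllib's quote before the row is printed, so free-text values (e.g. page titles) cannot break the tab-separated layout. For example (the quote import in wikiq itself sits outside this hunk):

from urllib.parse import quote

title = 'Talk:Some "page"\twith a tab'
print(quote(str(title)))   # Talk%3ASome%20%22page%22%09with%20a%20tab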