bugfix.
commit 39c0fa7a29
parent 0436450ea8
@@ -62,7 +62,7 @@ def my_tokenizer(text, mwe_pass, mwe_tokenize, stopWords, ngram_output):
     for sentence in sentences:
         if random() <= 0.1:
             grams = list(chain(*map(lambda i : ngrams(sentence,i),range(4))))
-            Path(ngram_output).mkdir(parents=True, exist_ok=True)
+            Path(ngram_output).parent.mkdir(parents=True, exist_ok=True)
             with open(ngram_output,'a') as gram_file:
                 for ng in grams:
                     gram_file.write(' '.join(ng) + '\n')
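For context on the fix: the old line created a directory at the output path itself, so the subsequent open(ngram_output, 'a') would fail with IsADirectoryError; the new line creates the output file's parent directory instead. Below is a minimal runnable sketch of the fixed block, assuming ngrams is nltk.util.ngrams, sentences is an iterable of token lists, and write_sample_ngrams is a hypothetical wrapper name (in the repo this code sits inside my_tokenizer).

# Minimal sketch of the fixed block. Assumptions: `ngrams` is
# nltk.util.ngrams, `sentences` is an iterable of token lists, and
# `write_sample_ngrams` is a hypothetical wrapper around the snippet.
from itertools import chain
from pathlib import Path
from random import random

from nltk.util import ngrams


def write_sample_ngrams(sentences, ngram_output):
    for sentence in sentences:
        # Sample roughly 10% of sentences.
        if random() <= 0.1:
            # Collect n-grams for n in range(4), as in the original code.
            grams = list(chain(*map(lambda i: ngrams(sentence, i), range(4))))
            # The fix: create the output file's *parent* directory, rather
            # than a directory at the output path itself (which made the
            # open() call below fail).
            Path(ngram_output).parent.mkdir(parents=True, exist_ok=True)
            with open(ngram_output, 'a') as gram_file:
                for ng in grams:
                    gram_file.write(' '.join(ng) + '\n')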