# t_rex/filtering_denoise.py
# (source: asahi417, commit "init" 7fe48b5)
import json
import string
import re
# Bare pronouns carry no entity information and are filtered out.
stopwords = ["he", "she", "they", "it"]
# Characters allowed in a (lower-cased) surface form: a-z, 0-9, space.
list_alnum = string.ascii_lowercase + '0123456789 '
# Compiled once at import time instead of on every call; matches any
# character outside the allowed alphabet.
_non_alnum = re.compile(rf'[^{list_alnum}]+')
# URL-ish or punctuation/symbol prefixes that mark a noisy token.
_bad_prefixes = ("www", ".", ",", "$", "+", "#")


def filtering(entry):
    """Return True if the triple ``entry`` survives the denoising filters.

    ``entry`` is a dict with at least ``"subject"`` and ``"object"`` string
    keys. An entry is kept only when both surface forms (lower-cased)
    contain just ``[a-z0-9 ]``, are not stopword pronouns, do not start
    with a noisy prefix, and at least one of the two original strings
    contains an uppercase character (all-lowercase pairs are likely common
    nouns rather than named entities).
    """
    def _subfilter(token):
        # Reject tokens with any character outside the allowed alphabet.
        if _non_alnum.search(token):
            return False
        if token in stopwords:
            return False
        # One call covers the original chain of six startswith checks.
        if token.startswith(_bad_prefixes):
            return False
        return True

    if not _subfilter(entry["object"].lower()):
        return False
    if not _subfilter(entry["subject"].lower()):
        return False
    # Drop pairs where BOTH original strings are entirely lowercase.
    if entry['object'].islower() and entry['subject'].islower():
        return False
    return True
# Load the raw T-REx dump (JSONL, one entry per line), apply the denoising
# filter, and write the surviving entries back out as JSONL.
with open("data/t_rex.raw.jsonl") as f:
    # Iterate the file lazily instead of read().split('\n'); skip blank lines.
    data = [json.loads(line) for line in f if line.strip()]
print(f"[before]: {len(data)}")
data = [entry for entry in data if filtering(entry)]
print(f"[after] : {len(data)}")
with open("data/t_rex.filter.jsonl", 'w') as f:
    f.write('\n'.join(json.dumps(entry) for entry in data))