paranormal-or-skeptic/tokenizer.py

#!/usr/bin/python3
from nltk.tokenize import word_tokenize
from nltk.corpus import stopwords
import re
import string
# English stop-word list and printable-character set used by tokenize().
stop_words = set(stopwords.words('english'))
printable = set(string.printable)
def tokenize(d):
    # Collapse every URL into a single placeholder token, so all links map
    # to one shared feature.
    d = re.sub(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',
               'thereisasimplelinkinside', d, flags=re.MULTILINE)
    # Turn literal "\n" escape sequences (backslash + n in the text) into spaces.
    d = re.sub(r'\\n', ' ', d)
    # Strip markup-like punctuation: * ' " / ~ _ = -
    d = re.sub(r'[*\'"/~_=-]', ' ', d)
    # Drop any non-printable (e.g. non-ASCII) characters.
    d = ''.join(ch for ch in d if ch in printable)
    # Tokenize, lowercase, and remove English stop words.
    tokenized = word_tokenize(d)
    lower = [w.lower() for w in tokenized]
    words = [w for w in lower if w not in stop_words]
    return words
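
# A minimal usage sketch (not part of the original file). It assumes the NLTK
# 'punkt' and 'stopwords' data packages are already installed, e.g. via
# nltk.download('punkt') and nltk.download('stopwords').
if __name__ == '__main__':
    sample = 'I saw a ghost!\\nProof: https://example.com/ghost.jpg'
    print(tokenize(sample))
    # Expected shape of the output: lowercase tokens with stop words removed
    # and the URL collapsed to the placeholder, roughly:
    # ['saw', 'ghost', '!', 'proof', ':', 'thereisasimplelinkinside']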