# Distance between two strings
from nltk import metrics, stem, tokenize
# Module-level Porter stemmer, built once and reused by normalize().
stemmer = stem.PorterStemmer()
def normalize(s):
    """Return a canonical form of *s*: lower-cased, tokenized, Porter-stemmed,
    and re-joined with single spaces."""
    stems = []
    for token in tokenize.wordpunct_tokenize(s.lower().strip()):
        stems.append(stemmer.stem(token))
    return ' '.join(stems)
def fuzzy_match(s1, s2, max_dist=3):
    """Return True when the normalized forms of *s1* and *s2* are within
    *max_dist* edit-distance operations of each other."""
    distance = metrics.edit_distance(normalize(s1), normalize(s2))
    return distance <= max_dist
s=""
while s.lower()!="bye":
s=raw_input("You: ")
s_array=s.split(";")
if len(s_array)>1:
s1=s_array[0].strip()
s2=s_array[1].strip()
print fuzzy_match(s1,s2)