Browse Source

Added multiprocessing functionality, but it still needs a speed test.

multiprocess_search
Raphael Roberts 7 years ago
parent
commit
ab1afc4f7a
  1. 20
      hangman.py

20
hangman.py

@ -2,6 +2,7 @@ import codecs
import hashlib
import json
import os
import multiprocessing
import pickle
import re
from string import ascii_lowercase as alphabet
@ -70,10 +71,21 @@ def generate_letter_frequency(word_list):
if cached is None:
save_freq_cache(word_list,ret)
return ret
def filter_wordlist(input,remaining_letters,word_list):
class bool_regex:
    """Callable adapter around a compiled regular expression.

    Pool.map needs a plain top-level callable to send to worker
    processes; this wraps the compiled pattern and reports whether
    a string matches it (from the start, per ``re.match``).
    """

    def __init__(self, expr):
        # The compiled pattern whose match result gets booleanized.
        self.expr = expr

    def __call__(self, arg):
        """Return True when ``arg`` matches ``self.expr`` at its start."""
        return self.expr.match(arg) is not None
def filter_wordlist(input, remaining_letters, word_list, mp=True):
    """Return the words in ``word_list`` matching the hangman pattern.

    Each ``'.'`` in ``input`` may stand for any still-unguessed letter in
    ``remaining_letters``; every other character must match literally, and
    the pattern is anchored with ``'$'`` so the whole word is consumed.

    Args:
        input: pattern string such as ``'c.t'`` (name shadows the builtin;
            kept for backward compatibility with existing callers).
        remaining_letters: iterable of single characters a ``'.'`` may match.
        word_list: candidate words to filter.
        mp: when True, evaluate the matches in a ``multiprocessing.Pool``
            (per the commit note, the speed benefit is unverified — TODO
            benchmark against the serial path).

    Returns:
        List of matching words, in their original order.
    """
    letters = ''.join(remaining_letters)
    # Guard: '[]' is a regex syntax error ("unterminated character set"),
    # and with no letters left a '.' placeholder can match nothing anyway.
    if not letters and '.' in input:
        return []
    regex = re.compile(input.replace('.', '[{}]'.format(letters)) + '$')
    if mp:
        # bool_regex wraps the pattern in a picklable callable for the
        # worker processes; the with-block tears the pool down after the
        # blocking map() completes.
        with multiprocessing.Pool() as pool:
            matches = pool.map(bool_regex(regex), word_list)
    else:
        matches = map(regex.match, word_list)
    return [word for hit, word in zip(matches, word_list) if hit]
@ -156,7 +168,7 @@ def iterate(word_list,let_freq,prev_word = None):
entered_letters.update(re.findall('[a-z]',word))
remaining_letters = (ALPHABET & set(let_freq.keys())) - entered_letters - negatives
for i,word in enumerate(entered_words):
remaining_possibilities = filter_wordlist(word,remaining_letters,word_list[i])
remaining_possibilities = filter_wordlist(word,remaining_letters,word_list[i],mp=True)
word_list[i] = remaining_possibilities
print('Matches found:', '\n'.join(multi_word(word_list,10)),sep='\n')
print_likely_chars(remaining_letters,let_freq)

Loading…
Cancel
Save