|
|
|
import codecs
import functools
import hashlib
import json
import pickle
|
|
|
def load_words(filename):
    """Load a newline-delimited word list from *filename*.

    Returns a set of the file's non-empty lines, lowercased, so lookups
    are case-insensitive and duplicates collapse.

    filename: path to a text file with one word per line.
    """
    # Bug fix: the original body read from an undefined name `file` and
    # never opened the path; use a context manager so the handle is closed.
    with open(filename, encoding='utf-8') as file:
        text = file.read()
    # filter(bool, ...) drops empty lines (including the trailing one from
    # a final newline) before lowercasing.
    return set(map(str.lower, filter(bool, text.split('\n'))))
|
|
|
|
|
|
|
@functools.lru_cache(maxsize=None)
def _get_wordlist_hash(word_list_s):
    """Return the raw digest of a collection of words.

    The words are sorted before hashing, so the digest is independent of
    the caller's ordering.

    word_list_s: an iterable of strings. It MUST be hashable (e.g. a
        tuple or frozenset) because this function is memoized with
        ``lru_cache`` — a plain list raises TypeError at the cache lookup.

    Returns: the digest as ``bytes``.
    """
    # NOTE(review): HASH_FUNC is a module-level hash constructor defined
    # elsewhere in this file (presumably something like hashlib.sha256) —
    # confirm against the full module.
    # `_hash` (not `hash`) avoids shadowing the builtin.
    _hash = HASH_FUNC()
    for word in sorted(word_list_s):
        # Hash each word's UTF-8 encoding (str.encode defaults to UTF-8).
        _hash.update(word.encode())
    return _hash.digest()
|
|
|
|
|
|
|
def hash_wordlist(word_list, raw=False):
    """Hash a collection of words into a URL-safe base64 string.

    The words are sorted first, so the result is independent of the
    input ordering.

    word_list: an iterable of strings.
    raw: if True, return the raw digest ``bytes`` instead of the
        base64-encoded string.

    Returns: a base64 string with '+' and '/' replaced by '-' and '_'
    (URL-safe), or raw digest bytes when ``raw`` is True.
    """
    # Bug fix: _get_wordlist_hash is lru_cache'd, so its argument must be
    # hashable — pass a tuple, not the sorted list (which raises TypeError).
    fhash = _get_wordlist_hash(tuple(sorted(word_list)))
    if raw:
        return fhash
    # codecs' base64 codec produces bytes; decode to str for the caller.
    encoded = codecs.encode(fhash, 'base64').decode()
    # Translate the two non-URL-safe base64 characters in one C-level pass.
    url_safe_table = str.maketrans({'+': '-', '/': '_'})
    return encoded.translate(url_safe_table)
|
|
|
|
|
|
|
|