URI: 
       Merge pull request #5758 from mbarkhau/master - electrum - Electrum Bitcoin wallet
  HTML git clone https://git.parazyd.org/electrum
   DIR Log
   DIR Files
   DIR Refs
   DIR Submodules
       ---
   DIR commit bc4f22503f3c38a1c458db4ede364f0ea02dd9b1
   DIR parent c2c291dd3a9b2dac8e188158317bd998701a0ded
  HTML Author: ghost43 <somber.night@protonmail.com>
       Date:   Fri, 15 Nov 2019 09:59:40 +0000
       
       Merge pull request #5758 from mbarkhau/master
       
       Mnemonic performance improvements
       Diffstat:
         M electrum/mnemonic.py                |      38 +++++++++++++++++++------------
       
       1 file changed, 23 insertions(+), 15 deletions(-)
       ---
   DIR diff --git a/electrum/mnemonic.py b/electrum/mnemonic.py
        @@ -89,20 +89,29 @@ def normalize_text(seed: str) -> str:
            seed = u''.join([seed[i] for i in range(len(seed)) if not (seed[i] in string.whitespace and is_CJK(seed[i-1]) and is_CJK(seed[i+1]))])
            return seed
        
       +
       +_WORDLIST_CACHE = {}
       +
       +
        def load_wordlist(filename):
            path = resource_path('wordlist', filename)
       -    with open(path, 'r', encoding='utf-8') as f:
       -        s = f.read().strip()
       -    s = unicodedata.normalize('NFKD', s)
       -    lines = s.split('\n')
       -    wordlist = []
       -    for line in lines:
       -        line = line.split('#')[0]
       -        line = line.strip(' \r')
       -        assert ' ' not in line
       -        if line:
       -            wordlist.append(line)
       -    return wordlist
       +    if path not in _WORDLIST_CACHE:
       +        with open(path, 'r', encoding='utf-8') as f:
       +            s = f.read().strip()
       +        s = unicodedata.normalize('NFKD', s)
       +        lines = s.split('\n')
       +        wordlist = []
       +        for line in lines:
       +            line = line.split('#')[0]
       +            line = line.strip(' \r')
       +            assert ' ' not in line
       +            if line:
       +                wordlist.append(line)
       +
       +        # wordlists shouldn't be mutated, but just in case,
       +        # convert it to a tuple
       +        _WORDLIST_CACHE[path] = tuple(wordlist)
       +    return _WORDLIST_CACHE[path]
        
        
        filenames = {
        @@ -114,8 +123,6 @@ filenames = {
        }
        
        
       -# FIXME every time we instantiate this class, we read the wordlist from disk
       -# and store a new copy of it in memory
        class Mnemonic(Logger):
            # Seed derivation does not follow BIP39
            # Mnemonic phrase uses a hash based checksum, instead of a wordlist-dependent checksum
        @@ -126,6 +133,7 @@ class Mnemonic(Logger):
                self.logger.info(f'language {lang}')
                filename = filenames.get(lang[0:2], 'english.txt')
                self.wordlist = load_wordlist(filename)
       +        self.wordlist_indexes = {w: i for i, w in enumerate(self.wordlist)}
                self.logger.info(f"wordlist has {len(self.wordlist)} words")
        
            @classmethod
        @@ -156,7 +164,7 @@ class Mnemonic(Logger):
                i = 0
                while words:
                    w = words.pop()
       -            k = self.wordlist.index(w)
       +            k = self.wordlist_indexes[w]
                    i = i*n + k
                return i