        git clone https://git.parazyd.org/electrum
       ---
        commit 73e2b09ba82fcc34f2762500b7ab3d7b839b5162
        parent 2484c52611defa96ed060af4152707b3d1bfdea5
        Author: SomberNight <somber.night@protonmail.com>
       Date:   Fri, 30 Nov 2018 16:36:37 +0100
       
       blockchain: check best chain on disk is consistent with checkpoints
       
        had a corrupted mainnet datadir that had a testnet blockchain_headers file
        (I had probably corrupted it myself, but Electrum could not recover from it)
       
       Diffstat:
         M electrum/blockchain.py              |      27 ++++++++++++++++++---------
       
       1 file changed, 18 insertions(+), 9 deletions(-)
       ---
        diff --git a/electrum/blockchain.py b/electrum/blockchain.py
        @@ -86,11 +86,20 @@ blockchains_lock = threading.RLock()
        
        
        def read_blockchains(config: 'SimpleConfig'):
       -    blockchains[constants.net.GENESIS] = Blockchain(config=config,
       -                                                    forkpoint=0,
       -                                                    parent=None,
       -                                                    forkpoint_hash=constants.net.GENESIS,
       -                                                    prev_hash=None)
       +    best_chain = Blockchain(config=config,
       +                            forkpoint=0,
       +                            parent=None,
       +                            forkpoint_hash=constants.net.GENESIS,
       +                            prev_hash=None)
       +    blockchains[constants.net.GENESIS] = best_chain
       +    # consistency checks
       +    if best_chain.height() > constants.net.max_checkpoint():
       +        header_after_cp = best_chain.read_header(constants.net.max_checkpoint()+1)
       +        if not header_after_cp or not best_chain.can_connect(header_after_cp, check_height=False):
       +            util.print_error("[blockchain] deleting best chain. cannot connect header after last cp to last cp.")
       +            os.unlink(best_chain.path())
       +            best_chain.update_size()
       +    # forks
            fdir = os.path.join(util.get_headers_dir(config), 'forks')
            util.make_dir(fdir)
            # files are named as: fork2_{forkpoint}_{prev_hash}_{first_hash}
        @@ -98,7 +107,7 @@ def read_blockchains(config: 'SimpleConfig'):
            l = sorted(l, key=lambda x: int(x.split('_')[1]))  # sort by forkpoint
        
            def delete_chain(filename, reason):
       -        util.print_error("[blockchain]", reason, filename)
       +        util.print_error(f"[blockchain] deleting chain {filename}: {reason}")
                os.unlink(os.path.join(fdir, filename))
        
            def instantiate_chain(filename):
        @@ -222,10 +231,10 @@ class Blockchain(util.PrintError):
                                  prev_hash=parent.get_hash(forkpoint-1))
                open(self.path(), 'w+').close()
                self.save_header(header)
       -        # put into global dict
       +        # put into global dict. note that in some cases
       +        # save_header might have already put it there but that's OK
                chain_id = self.get_id()
                with blockchains_lock:
       -            assert chain_id not in blockchains, (chain_id, list(blockchains))
                    blockchains[chain_id] = self
                return self
        
        @@ -392,7 +401,7 @@
                delta = header.get('block_height') - self.forkpoint
                data = bfh(serialize_header(header))
                # headers are only _appended_ to the end:
       -        assert delta == self.size()
       +        assert delta == self.size(), (delta, self.size())
                assert len(data) == HEADER_SIZE
                self.write(data, delta*HEADER_SIZE)
                self.swap_with_parent()
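        ---
        Note: for illustration, below is a minimal standalone sketch of the consistency
        check that the new read_blockchains performs. The helper names
        (best_chain_matches_checkpoints, ensure_consistent_best_chain) and the simplified
        Blockchain interface are assumptions made for this example only, not part of
        Electrum's API. The logic mirrors the first hunk above: if headers exist on disk
        past the last hard-coded checkpoint, the header right after the checkpoint must
        connect to the checkpointed chain; otherwise the on-disk headers file is deleted
        so it can be re-synced from scratch.

            import os

            def best_chain_matches_checkpoints(best_chain, max_checkpoint: int) -> bool:
                """Return True if the on-disk best chain is consistent with the
                hard-coded checkpoints, i.e. the header right after the last
                checkpoint actually connects to the checkpointed chain."""
                if best_chain.height() <= max_checkpoint:
                    # not enough headers on disk to contradict the checkpoints
                    return True
                header_after_cp = best_chain.read_header(max_checkpoint + 1)
                return bool(header_after_cp) and best_chain.can_connect(
                    header_after_cp, check_height=False)

            def ensure_consistent_best_chain(best_chain, max_checkpoint: int) -> None:
                """Delete the headers file if it disagrees with the checkpoints
                (e.g. a testnet blockchain_headers file in a mainnet datadir)."""
                if not best_chain_matches_checkpoints(best_chain, max_checkpoint):
                    os.unlink(best_chain.path())   # drop the inconsistent headers file
                    best_chain.update_size()       # chain is now empty; will re-sync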