1# Electrum - lightweight Bitcoin client
2# Copyright (C) 2012 thomasv@ecdsa.org
3#
4# Permission is hereby granted, free of charge, to any person
5# obtaining a copy of this software and associated documentation files
6# (the "Software"), to deal in the Software without restriction,
7# including without limitation the rights to use, copy, modify, merge,
8# publish, distribute, sublicense, and/or sell copies of the Software,
9# and to permit persons to whom the Software is furnished to do so,
10# subject to the following conditions:
11#
12# The above copyright notice and this permission notice shall be
13# included in all copies or substantial portions of the Software.
14#
15# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
17# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
18# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
19# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
20# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
21# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22# SOFTWARE.
23import os
24import threading
25import time
26from typing import Optional, Dict, Mapping, Sequence
27
28from . import util
29from .bitcoin import hash_encode, int_to_hex, rev_hex
30from .crypto import sha256d
31from . import constants
32from .util import bfh, bh2u, with_lock
33from .simple_config import SimpleConfig
34from .logging import get_logger, Logger
35
36
37_logger = get_logger(__name__)
38
HEADER_SIZE = 80  # bytes
# Maximum (easiest) proof-of-work target; corresponds to compact bits 0x1d00ffff.
MAX_TARGET = 0x00000000FFFF0000000000000000000000000000000000000000000000000000
41
42
class MissingHeader(Exception):
    """Raised when a header needed for a computation is not stored locally."""
    pass
45
class InvalidHeader(Exception):
    """Raised when raw header bytes cannot be deserialized."""
    pass
48
def serialize_header(header_dict: dict) -> str:
    """Serialize a header dict into its 80-byte wire format, hex-encoded."""
    fields = [
        int_to_hex(header_dict['version'], 4),
        rev_hex(header_dict['prev_block_hash']),
        rev_hex(header_dict['merkle_root']),
        int_to_hex(int(header_dict['timestamp']), 4),
        int_to_hex(int(header_dict['bits']), 4),
        int_to_hex(int(header_dict['nonce']), 4),
    ]
    return ''.join(fields)
57
def deserialize_header(s: bytes, height: int) -> dict:
    """Parse an 80-byte raw header into a dict, tagging it with its height.

    Raises InvalidHeader if the input is empty or not exactly HEADER_SIZE bytes.
    """
    if not s:
        raise InvalidHeader('Invalid header: {}'.format(s))
    if len(s) != HEADER_SIZE:
        raise InvalidHeader('Invalid header length: {}'.format(len(s)))

    def read_le_uint(b: bytes) -> int:
        # header integer fields are little-endian on the wire
        return int.from_bytes(b, byteorder='little')

    return {
        'version': read_le_uint(s[0:4]),
        'prev_block_hash': hash_encode(s[4:36]),
        'merkle_root': hash_encode(s[36:68]),
        'timestamp': read_le_uint(s[68:72]),
        'bits': read_le_uint(s[72:76]),
        'nonce': read_le_uint(s[76:80]),
        'block_height': height,
    }
73
def hash_header(header: dict) -> str:
    """Return the block hash (hex) of the given header dict.

    A None header hashes to 64 zero characters. A header missing its
    prev_block_hash gets one filled in with zeroes (mutates the dict).
    """
    if header is None:
        return 64 * '0'
    if header.get('prev_block_hash') is None:
        header['prev_block_hash'] = 32 * '00'
    serialized = serialize_header(header)
    return hash_raw_header(serialized)
80
81
def hash_raw_header(header: str) -> str:
    """Double-SHA256 a hex-encoded raw header; return the digest hex-encoded."""
    raw = bfh(header)
    return hash_encode(sha256d(raw))
84
85
# Registry of all known chains/forks.
# key: blockhash hex at forkpoint
# the chain at some key is the best chain that includes the given hash
blockchains = {}  # type: Dict[str, Blockchain]
blockchains_lock = threading.RLock()  # lock order: take this last; so after Blockchain.lock
90
91
def read_blockchains(config: 'SimpleConfig'):
    """Load the best chain and all fork files from disk into `blockchains`.

    Forks that fail consistency checks (below max checkpoint, orphaned,
    wrong first hash, or not connectable to their parent) are deleted.
    """
    best_chain = Blockchain(config=config,
                            forkpoint=0,
                            parent=None,
                            forkpoint_hash=constants.net.GENESIS,
                            prev_hash=None)
    blockchains[constants.net.GENESIS] = best_chain
    # consistency checks
    if best_chain.height() > constants.net.max_checkpoint():
        header_after_cp = best_chain.read_header(constants.net.max_checkpoint()+1)
        if not header_after_cp or not best_chain.can_connect(header_after_cp, check_height=False):
            _logger.info("[blockchain] deleting best chain. cannot connect header after last cp to last cp.")
            os.unlink(best_chain.path())
            best_chain.update_size()
    # forks
    fdir = os.path.join(util.get_headers_dir(config), 'forks')
    util.make_dir(fdir)
    # files are named as: fork2_{forkpoint}_{prev_hash}_{first_hash}
    l = filter(lambda x: x.startswith('fork2_') and '.' not in x, os.listdir(fdir))
    l = sorted(l, key=lambda x: int(x.split('_')[1]))  # sort by forkpoint

    def delete_chain(filename, reason):
        # remove a fork file that failed a consistency check
        _logger.info(f"[blockchain] deleting chain (unknown): {reason}")
        os.unlink(os.path.join(fdir, filename))

    def instantiate_chain(filename):
        __, forkpoint, prev_hash, first_hash = filename.split('_')
        forkpoint = int(forkpoint)
        # hashes are stored in filenames with leading zeroes stripped;
        # restore them to full 64-char hex here
        prev_hash = (64-len(prev_hash)) * "0" + prev_hash  # left-pad with zeroes
        first_hash = (64-len(first_hash)) * "0" + first_hash
        # forks below the max checkpoint are not allowed
        if forkpoint <= constants.net.max_checkpoint():
            delete_chain(filename, "deleting fork below max checkpoint")
            return
        # find parent (sorting by forkpoint guarantees it's already instantiated)
        for parent in blockchains.values():
            if parent.check_hash(forkpoint - 1, prev_hash):
                break
        else:
            delete_chain(filename, "cannot find parent for chain")
            return
        b = Blockchain(config=config,
                       forkpoint=forkpoint,
                       parent=parent,
                       forkpoint_hash=first_hash,
                       prev_hash=prev_hash)
        # consistency checks
        h = b.read_header(b.forkpoint)
        if first_hash != hash_header(h):
            delete_chain(filename, "incorrect first hash for chain")
            return
        if not b.parent.can_connect(h, check_height=False):
            delete_chain(filename, "cannot connect chain to parent")
            return
        chain_id = b.get_id()
        assert first_hash == chain_id, (first_hash, chain_id)
        blockchains[chain_id] = b

    for filename in l:
        instantiate_chain(filename)
152
153
def get_best_chain() -> 'Blockchain':
    """Return the chain anchored at the genesis block (the main chain)."""
    genesis_hash = constants.net.GENESIS
    return blockchains[genesis_hash]
156
# Memoizes cumulative chainwork computed by Blockchain.get_chainwork().
# block hash -> chain work; up to and including that block
_CHAINWORK_CACHE = {
    "0000000000000000000000000000000000000000000000000000000000000000": 0,  # virtual block at height -1
}  # type: Dict[str, int]
161
162
def init_headers_file_for_best_chain():
    """Ensure the main-chain headers file exists and is large enough to
    cover the checkpointed region, creating it as a sparse file if needed.
    """
    b = get_best_chain()
    filename = b.path()
    # each checkpoint covers one retarget period of 2016 headers
    length = HEADER_SIZE * len(constants.net.CHECKPOINTS) * 2016
    if not os.path.exists(filename) or os.path.getsize(filename) < length:
        with open(filename, 'wb') as f:
            if length > 0:
                # seek+write a single byte to allocate the file logically
                # without writing `length` bytes of zeroes
                f.seek(length - 1)
                f.write(b'\x00')
        util.ensure_sparse_file(filename)
    with b.lock:
        b.update_size()
175
176
class Blockchain(Logger):
    """
    Manages blockchain headers and their verification.

    Each instance is backed by one headers file on disk. A chain either
    starts at genesis (parent is None) or forks off a parent chain at
    `forkpoint`, in which case its file only stores headers from the
    forkpoint onwards; lower headers are read from the parent.
    """

    def __init__(self, config: SimpleConfig, forkpoint: int, parent: Optional['Blockchain'],
                 forkpoint_hash: str, prev_hash: Optional[str]):
        """forkpoint is the height of the first header this chain stores."""
        assert isinstance(forkpoint_hash, str) and len(forkpoint_hash) == 64, forkpoint_hash
        assert (prev_hash is None) or (isinstance(prev_hash, str) and len(prev_hash) == 64), prev_hash
        # assert (parent is None) == (forkpoint == 0)
        if 0 < forkpoint <= constants.net.max_checkpoint():
            raise Exception(f"cannot fork below max checkpoint. forkpoint: {forkpoint}")
        Logger.__init__(self)
        self.config = config
        self.forkpoint = forkpoint  # height of first header
        self.parent = parent
        self._forkpoint_hash = forkpoint_hash  # blockhash at forkpoint. "first hash"
        self._prev_hash = prev_hash  # blockhash immediately before forkpoint
        self.lock = threading.RLock()
        self.update_size()

    @property
    def checkpoints(self):
        """Checkpoints of the active network (list of (hash, target) pairs)."""
        return constants.net.CHECKPOINTS

    def get_max_child(self) -> Optional[int]:
        """Return the highest forkpoint among direct children, or None if childless."""
        children = self.get_direct_children()
        return max([x.forkpoint for x in children]) if children else None

    def get_max_forkpoint(self) -> int:
        """Returns the max height where there is a fork
        related to this chain.
        """
        mc = self.get_max_child()
        return mc if mc is not None else self.forkpoint

    def get_direct_children(self) -> Sequence['Blockchain']:
        """Return all chains whose parent is this chain."""
        with blockchains_lock:
            return list(filter(lambda y: y.parent==self, blockchains.values()))

    def get_parent_heights(self) -> Mapping['Blockchain', int]:
        """Returns map: (parent chain -> height of last common block)"""
        with self.lock, blockchains_lock:
            result = {self: self.height()}
            chain = self
            while True:
                parent = chain.parent
                if parent is None: break
                result[parent] = chain.forkpoint - 1
                chain = parent
            return result

    def get_height_of_last_common_block_with_chain(self, other_chain: 'Blockchain') -> int:
        """Return the height of the highest block shared by self and other_chain."""
        last_common_block_height = 0
        our_parents = self.get_parent_heights()
        their_parents = other_chain.get_parent_heights()
        for chain in our_parents:
            if chain in their_parents:
                h = min(our_parents[chain], their_parents[chain])
                last_common_block_height = max(last_common_block_height, h)
        return last_common_block_height

    @with_lock
    def get_branch_size(self) -> int:
        """Number of headers between the highest related forkpoint and our tip."""
        return self.height() - self.get_max_forkpoint() + 1

    def get_name(self) -> str:
        """Short human-readable id: first 10 chars (leading zeroes stripped)
        of the block hash at the max forkpoint."""
        return self.get_hash(self.get_max_forkpoint()).lstrip('0')[0:10]

    def check_header(self, header: dict) -> bool:
        """Return whether this chain contains the given header (by hash at its height)."""
        header_hash = hash_header(header)
        height = header.get('block_height')
        return self.check_hash(height, header_hash)

    def check_hash(self, height: int, header_hash: str) -> bool:
        """Returns whether the hash of the block at given height
        is the given hash.
        """
        assert isinstance(header_hash, str) and len(header_hash) == 64, header_hash  # hex
        try:
            return header_hash == self.get_hash(height)
        except Exception:
            return False

    def fork(parent, header: dict) -> 'Blockchain':
        # NOTE: unconventional signature - 'parent' plays the role of self;
        # the method forks a new child chain off the chain it is called on.
        if not parent.can_connect(header, check_height=False):
            raise Exception("forking header does not connect to parent chain")
        forkpoint = header.get('block_height')
        self = Blockchain(config=parent.config,
                          forkpoint=forkpoint,
                          parent=parent,
                          forkpoint_hash=hash_header(header),
                          prev_hash=parent.get_hash(forkpoint-1))
        self.assert_headers_file_available(parent.path())
        # create an empty headers file for the new fork
        open(self.path(), 'w+').close()
        self.save_header(header)
        # put into global dict. note that in some cases
        # save_header might have already put it there but that's OK
        chain_id = self.get_id()
        with blockchains_lock:
            blockchains[chain_id] = self
        return self

    @with_lock
    def height(self) -> int:
        """Height of our tip (absolute, not relative to forkpoint)."""
        return self.forkpoint + self.size() - 1

    @with_lock
    def size(self) -> int:
        """Number of headers stored in this chain's own file."""
        return self._size

    @with_lock
    def update_size(self) -> None:
        """Re-read our headers file size from disk and cache the header count."""
        p = self.path()
        self._size = os.path.getsize(p)//HEADER_SIZE if os.path.exists(p) else 0

    @classmethod
    def verify_header(cls, header: dict, prev_hash: str, target: int, expected_header_hash: str=None) -> None:
        """Raise if header does not extend prev_hash with sufficient PoW for target."""
        _hash = hash_header(header)
        if expected_header_hash and expected_header_hash != _hash:
            raise Exception("hash mismatches with expected: {} vs {}".format(expected_header_hash, _hash))
        if prev_hash != header.get('prev_block_hash'):
            raise Exception("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash')))
        if constants.net.TESTNET:
            # difficulty checks (bits and PoW) are skipped on testnet
            return
        bits = cls.target_to_bits(target)
        if bits != header.get('bits'):
            raise Exception("bits mismatch: %s vs %s" % (bits, header.get('bits')))
        block_hash_as_num = int.from_bytes(bfh(_hash), byteorder='big')
        if block_hash_as_num > target:
            raise Exception(f"insufficient proof of work: {block_hash_as_num} vs target {target}")

    def verify_chunk(self, index: int, data: bytes) -> None:
        """Verify a chunk (up to 2016 consecutive headers); raises on failure.

        index is the chunk index, i.e. chunk covers heights
        [index*2016, (index+1)*2016).
        """
        num = len(data) // HEADER_SIZE
        start_height = index * 2016
        prev_hash = self.get_hash(start_height - 1)
        # the whole chunk shares one difficulty target (retarget period)
        target = self.get_target(index-1)
        for i in range(num):
            height = start_height + i
            try:
                expected_header_hash = self.get_hash(height)
            except MissingHeader:
                expected_header_hash = None
            raw_header = data[i*HEADER_SIZE : (i+1)*HEADER_SIZE]
            header = deserialize_header(raw_header, index*2016 + i)
            self.verify_header(header, prev_hash, target, expected_header_hash)
            prev_hash = hash_header(header)

    @with_lock
    def path(self):
        """Filesystem path of this chain's headers file."""
        d = util.get_headers_dir(self.config)
        if self.parent is None:
            filename = 'blockchain_headers'
        else:
            assert self.forkpoint > 0, self.forkpoint
            # filenames store the hashes with leading zeroes stripped
            prev_hash = self._prev_hash.lstrip('0')
            first_hash = self._forkpoint_hash.lstrip('0')
            basename = f'fork2_{self.forkpoint}_{prev_hash}_{first_hash}'
            filename = os.path.join('forks', basename)
        return os.path.join(d, filename)

    @with_lock
    def save_chunk(self, index: int, chunk: bytes):
        """Write a verified chunk to disk at the right offset, then try swapping
        with the parent if this made us the stronger chain."""
        assert index >= 0, index
        chunk_within_checkpoint_region = index < len(self.checkpoints)
        # chunks in checkpoint region are the responsibility of the 'main chain'
        if chunk_within_checkpoint_region and self.parent is not None:
            main_chain = get_best_chain()
            main_chain.save_chunk(index, chunk)
            return

        delta_height = (index * 2016 - self.forkpoint)
        delta_bytes = delta_height * HEADER_SIZE
        # if this chunk contains our forkpoint, only save the part after forkpoint
        # (the part before is the responsibility of the parent)
        if delta_bytes < 0:
            chunk = chunk[-delta_bytes:]
            delta_bytes = 0
        truncate = not chunk_within_checkpoint_region
        self.write(chunk, delta_bytes, truncate)
        self.swap_with_parent()

    def swap_with_parent(self) -> None:
        """Repeatedly swap this chain with its parent while it has more chainwork."""
        with self.lock, blockchains_lock:
            # do the swap; possibly multiple ones
            cnt = 0
            while True:
                old_parent = self.parent
                if not self._swap_with_parent():
                    break
                # make sure we are making progress
                cnt += 1
                if cnt > len(blockchains):
                    raise Exception(f'swapping fork with parent too many times: {cnt}')
                # we might have become the parent of some of our former siblings
                for old_sibling in old_parent.get_direct_children():
                    if self.check_hash(old_sibling.forkpoint - 1, old_sibling._prev_hash):
                        old_sibling.parent = self

    def _swap_with_parent(self) -> bool:
        """Check if this chain became stronger than its parent, and swap
        the underlying files if so. The Blockchain instances will keep
        'containing' the same headers, but their ids change and so
        they will be stored in different files."""
        if self.parent is None:
            return False
        if self.parent.get_chainwork() >= self.get_chainwork():
            return False
        self.logger.info(f"swapping {self.forkpoint} {self.parent.forkpoint}")
        parent_branch_size = self.parent.height() - self.forkpoint + 1
        forkpoint = self.forkpoint  # type: Optional[int]
        parent = self.parent  # type: Optional[Blockchain]
        child_old_id = self.get_id()
        parent_old_id = parent.get_id()
        # swap files
        # child takes parent's name
        # parent's new name will be something new (not child's old name)
        self.assert_headers_file_available(self.path())
        child_old_name = self.path()
        with open(self.path(), 'rb') as f:
            my_data = f.read()
        self.assert_headers_file_available(parent.path())
        assert forkpoint > parent.forkpoint, (f"forkpoint of parent chain ({parent.forkpoint}) "
                                              f"should be at lower height than children's ({forkpoint})")
        with open(parent.path(), 'rb') as f:
            f.seek((forkpoint - parent.forkpoint)*HEADER_SIZE)
            parent_data = f.read(parent_branch_size*HEADER_SIZE)
        self.write(parent_data, 0)
        parent.write(my_data, (forkpoint - parent.forkpoint)*HEADER_SIZE)
        # swap parameters
        self.parent, parent.parent = parent.parent, self  # type: Optional[Blockchain], Optional[Blockchain]
        self.forkpoint, parent.forkpoint = parent.forkpoint, self.forkpoint
        self._forkpoint_hash, parent._forkpoint_hash = parent._forkpoint_hash, hash_raw_header(bh2u(parent_data[:HEADER_SIZE]))
        self._prev_hash, parent._prev_hash = parent._prev_hash, self._prev_hash
        # parent's new name
        os.replace(child_old_name, parent.path())
        self.update_size()
        parent.update_size()
        # update pointers
        blockchains.pop(child_old_id, None)
        blockchains.pop(parent_old_id, None)
        blockchains[self.get_id()] = self
        blockchains[parent.get_id()] = parent
        return True

    def get_id(self) -> str:
        """Chain id: the blockhash at the forkpoint ("first hash")."""
        return self._forkpoint_hash

    def assert_headers_file_available(self, path):
        """Raise FileNotFoundError with a helpful message if the headers file is gone."""
        if os.path.exists(path):
            return
        elif not os.path.exists(util.get_headers_dir(self.config)):
            raise FileNotFoundError('Electrum headers_dir does not exist. Was it deleted while running?')
        else:
            raise FileNotFoundError('Cannot find headers file but headers_dir is there. Should be at {}'.format(path))

    @with_lock
    def write(self, data: bytes, offset: int, truncate: bool=True) -> None:
        """Write raw header bytes at the given byte offset and fsync.

        If truncate is set and the write is not a pure append, the file
        is cut at `offset` first (discarding stale headers past it).
        """
        filename = self.path()
        self.assert_headers_file_available(filename)
        with open(filename, 'rb+') as f:
            if truncate and offset != self._size * HEADER_SIZE:
                f.seek(offset)
                f.truncate()
            f.seek(offset)
            f.write(data)
            f.flush()
            os.fsync(f.fileno())
        self.update_size()

    @with_lock
    def save_header(self, header: dict) -> None:
        """Append a single verified header at the tip, then try a parent swap."""
        delta = header.get('block_height') - self.forkpoint
        data = bfh(serialize_header(header))
        # headers are only _appended_ to the end:
        assert delta == self.size(), (delta, self.size())
        assert len(data) == HEADER_SIZE
        self.write(data, delta*HEADER_SIZE)
        self.swap_with_parent()

    @with_lock
    def read_header(self, height: int) -> Optional[dict]:
        """Read the header at `height`, delegating to the parent for heights
        below our forkpoint. Returns None for out-of-range heights or for
        unfilled (all-zero) slots in the sparse file."""
        if height < 0:
            return
        if height < self.forkpoint:
            return self.parent.read_header(height)
        if height > self.height():
            return
        delta = height - self.forkpoint
        name = self.path()
        self.assert_headers_file_available(name)
        with open(name, 'rb') as f:
            f.seek(delta * HEADER_SIZE)
            h = f.read(HEADER_SIZE)
            if len(h) < HEADER_SIZE:
                raise Exception('Expected to read a full header. This was only {} bytes'.format(len(h)))
        if h == bytes([0])*HEADER_SIZE:
            # unwritten region of the sparse file
            return None
        return deserialize_header(h, height)

    def header_at_tip(self) -> Optional[dict]:
        """Return latest header."""
        height = self.height()
        return self.read_header(height)

    def is_tip_stale(self) -> bool:
        """Return whether our tip header is older than 8 hours (or missing)."""
        STALE_DELAY = 8 * 60 * 60  # in seconds
        header = self.header_at_tip()
        if not header:
            return True
        # note: We check the timestamp only in the latest header.
        #       The Bitcoin consensus has a lot of leeway here:
        #       - needs to be greater than the median of the timestamps of the past 11 blocks, and
        #       - up to at most 2 hours into the future compared to local clock
        #       so there is ~2 hours of leeway in either direction
        if header['timestamp'] + STALE_DELAY < time.time():
            return True
        return False

    def get_hash(self, height: int) -> str:
        """Return the block hash at `height`, using checkpoints where possible.

        Raises MissingHeader if the header is not stored locally.
        """
        def is_height_checkpoint():
            within_cp_range = height <= constants.net.max_checkpoint()
            at_chunk_boundary = (height+1) % 2016 == 0
            return within_cp_range and at_chunk_boundary

        if height == -1:
            # virtual block before genesis
            return '0000000000000000000000000000000000000000000000000000000000000000'
        elif height == 0:
            return constants.net.GENESIS
        elif is_height_checkpoint():
            index = height // 2016
            h, t = self.checkpoints[index]
            return h
        else:
            header = self.read_header(height)
            if header is None:
                raise MissingHeader(height)
            return hash_header(header)

    def get_target(self, index: int) -> int:
        """Return the PoW target of retarget period `index+1`.

        Raises MissingHeader if the boundary headers of chunk `index`
        are not available.
        """
        # compute target from chunk x, used in chunk x+1
        if constants.net.TESTNET:
            return 0
        if index == -1:
            return MAX_TARGET
        if index < len(self.checkpoints):
            h, t = self.checkpoints[index]
            return t
        # new target
        first = self.read_header(index * 2016)
        last = self.read_header(index * 2016 + 2015)
        if not first or not last:
            raise MissingHeader()
        bits = last.get('bits')
        target = self.bits_to_target(bits)
        nActualTimespan = last.get('timestamp') - first.get('timestamp')
        nTargetTimespan = 14 * 24 * 60 * 60
        # clamp the adjustment to a factor of 4 in either direction
        nActualTimespan = max(nActualTimespan, nTargetTimespan // 4)
        nActualTimespan = min(nActualTimespan, nTargetTimespan * 4)
        new_target = min(MAX_TARGET, (target * nActualTimespan) // nTargetTimespan)
        # not any target can be represented in 32 bits:
        new_target = self.bits_to_target(self.target_to_bits(new_target))
        return new_target

    @classmethod
    def bits_to_target(cls, bits: int) -> int:
        """Expand the compact 32-bit 'nBits' encoding into a full target int."""
        bitsN = (bits >> 24) & 0xff
        if not (0x03 <= bitsN <= 0x1d):
            raise Exception("First part of bits should be in [0x03, 0x1d]")
        bitsBase = bits & 0xffffff
        if not (0x8000 <= bitsBase <= 0x7fffff):
            raise Exception("Second part of bits should be in [0x8000, 0x7fffff]")
        return bitsBase << (8 * (bitsN-3))

    @classmethod
    def target_to_bits(cls, target: int) -> int:
        """Compress a target int into the compact 32-bit 'nBits' encoding."""
        c = ("%064x" % target)[2:]
        while c[:2] == '00' and len(c) > 6:
            c = c[2:]
        bitsN, bitsBase = len(c) // 2, int.from_bytes(bfh(c[:6]), byteorder='big')
        if bitsBase >= 0x800000:
            # keep the mantissa below the sign bit; bump the exponent instead
            bitsN += 1
            bitsBase >>= 8
        return bitsN << 24 | bitsBase

    def chainwork_of_header_at_height(self, height: int) -> int:
        """work done by single header at given height"""
        chunk_idx = height // 2016 - 1
        target = self.get_target(chunk_idx)
        work = ((2 ** 256 - target - 1) // (target + 1)) + 1
        return work

    @with_lock
    def get_chainwork(self, height=None) -> int:
        """Return the cumulative chainwork up to and including `height`
        (defaults to the tip). Full chunks are memoized in _CHAINWORK_CACHE."""
        if height is None:
            height = max(0, self.height())
        if constants.net.TESTNET:
            # On testnet/regtest, difficulty works somewhat different.
            # It's out of scope to properly implement that.
            return height
        last_retarget = height // 2016 * 2016 - 1
        cached_height = last_retarget
        # walk back in whole chunks until we hit a cached block hash
        while _CHAINWORK_CACHE.get(self.get_hash(cached_height)) is None:
            if cached_height <= -1:
                break
            cached_height -= 2016
        assert cached_height >= -1, cached_height
        running_total = _CHAINWORK_CACHE[self.get_hash(cached_height)]
        # roll forward chunk by chunk, caching as we go
        while cached_height < last_retarget:
            cached_height += 2016
            work_in_single_header = self.chainwork_of_header_at_height(cached_height)
            work_in_chunk = 2016 * work_in_single_header
            running_total += work_in_chunk
            _CHAINWORK_CACHE[self.get_hash(cached_height)] = running_total
        cached_height += 2016
        # add the partial chunk containing `height`
        work_in_single_header = self.chainwork_of_header_at_height(cached_height)
        work_in_last_partial_chunk = (height % 2016 + 1) * work_in_single_header
        return running_total + work_in_last_partial_chunk

    def can_connect(self, header: dict, check_height: bool=True) -> bool:
        """Return whether `header` can extend this chain at its tip
        (or, if check_height is False, anywhere along it)."""
        if header is None:
            return False
        height = header['block_height']
        if check_height and self.height() != height - 1:
            return False
        if height == 0:
            return hash_header(header) == constants.net.GENESIS
        try:
            prev_hash = self.get_hash(height - 1)
        except:
            return False
        if prev_hash != header.get('prev_block_hash'):
            return False
        try:
            target = self.get_target(height // 2016 - 1)
        except MissingHeader:
            return False
        try:
            self.verify_header(header, prev_hash, target)
        except BaseException as e:
            return False
        return True

    def connect_chunk(self, idx: int, hexdata: str) -> bool:
        """Verify and persist a hex-encoded chunk; returns success as bool."""
        assert idx >= 0, idx
        try:
            data = bfh(hexdata)
            self.verify_chunk(idx, data)
            self.save_chunk(idx, data)
            return True
        except BaseException as e:
            self.logger.info(f'verify_chunk idx {idx} failed: {repr(e)}')
            return False

    def get_checkpoints(self):
        """Build a checkpoint list from our stored headers."""
        # for each chunk, store the hash of the last block and the target after the chunk
        cp = []
        n = self.height() // 2016
        for index in range(n):
            h = self.get_hash((index+1) * 2016 -1)
            target = self.get_target(index)
            cp.append((h, target))
        return cp
640
641
def check_header(header: dict) -> Optional[Blockchain]:
    """Return some Blockchain that contains this header, or None."""
    if type(header) is not dict:
        return None
    with blockchains_lock:
        candidates = list(blockchains.values())
    return next((chain for chain in candidates if chain.check_header(header)), None)
651
652
def can_connect(header: dict) -> Optional[Blockchain]:
    """Return the Blockchain whose tip directly links up with header, or None."""
    with blockchains_lock:
        candidates = list(blockchains.values())
    for chain in candidates:
        if chain.can_connect(header):
            return chain
    return None
662
663
def get_chains_that_contain_header(height: int, header_hash: str) -> Sequence[Blockchain]:
    """Return all Blockchains containing the given block, strongest chain first."""
    with blockchains_lock:
        candidates = list(blockchains.values())
    matching = [chain for chain in candidates
                if chain.check_hash(height=height, header_hash=header_hash)]
    matching.sort(key=lambda chain: chain.get_chainwork(), reverse=True)
    return matching
671