persist channel db on disk. verify channel gossip sigs. - electrum - Electrum Bitcoin wallet
git clone https://git.parazyd.org/electrum
       ---
        commit a5b44d25b01be8aac2e6f661ba42f8e9930f97ff
        parent c1d182601483be4829526a686036e09247a15099
        Author: SomberNight <somber.night@protonmail.com>
       Date:   Mon, 23 Jul 2018 20:49:44 +0200
       
       persist channel db on disk. verify channel gossip sigs.
       
       Diffstat:
         M electrum/constants.py               |       5 +++++
         M electrum/ecc.py                     |       2 +-
         A electrum/lnchanannverifier.py       |     168 +++++++++++++++++++++++++++++++
         M electrum/lnrouter.py                |     180 ++++++++++++++++++++++++++-----
         M electrum/lnutil.py                  |      13 +++++++++++--
         M electrum/network.py                 |       3 ++-
         M electrum/tests/test_lnrouter.py     |      76 ++++++++++++++++++++++---------
         M electrum/util.py                    |       4 +++-
       
       8 files changed, 396 insertions(+), 55 deletions(-)
       ---
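        The change in brief: channel_announcement messages are no longer added to the
        channel DB unchecked.  Their four gossip signatures are verified, the funding
        transaction is SPV-checked against a block header via the new LNChanAnnVerifier
        thread job, and the funding output is compared against the expected P2WSH 2-of-2
        script before the channel is accepted; channel_update messages are signature
        checked and timestamp gated.  The resulting channel DB is persisted as JSON and
        reloaded on startup.

        A minimal, stdlib-only sketch (not the code in this diff) of the digest the
        gossip signatures commit to, following the slicing done in lnchanannverifier.py
        below: strip the 2-byte message type plus the leading 64-byte signature fields,
        then double-SHA256 the remainder.  The signature check itself stays with
        Electrum's ecc.verify_signature.

        import hashlib

        def double_sha256(data: bytes) -> bytes:
            # bitcoin.Hash in Electrum: SHA256 applied twice
            return hashlib.sha256(hashlib.sha256(data).digest()).digest()

        def announcement_digest(msg_bytes: bytes) -> bytes:
            # channel_announcement: 2-byte type, then 4 signatures of 64 bytes each
            return double_sha256(msg_bytes[2 + 4*64:])

        def update_digest(msg_bytes: bytes) -> bytes:
            # channel_update: 2-byte type, then a single 64-byte signature
            return double_sha256(msg_bytes[2 + 64:])

        ---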
        diff --git a/electrum/constants.py b/electrum/constants.py
        @@ -27,6 +27,7 @@ import os
        import json
        
        from .util import inv_dict
       +from . import bitcoin
        
        
        def read_json(filename, default):
        @@ -49,6 +50,10 @@ class AbstractNet:
            def max_checkpoint(cls) -> int:
                return max(0, len(cls.CHECKPOINTS) * 2016 - 1)
        
       +    @classmethod
       +    def rev_genesis_bytes(cls) -> bytes:
       +        return bytes.fromhex(bitcoin.rev_hex(cls.GENESIS))
       +
        
        class BitcoinMainnet(AbstractNet):
        
        diff --git a/electrum/ecc.py b/electrum/ecc.py
        @@ -313,7 +313,7 @@ def msg_magic(message: bytes) -> bytes:
            return b"\x18Bitcoin Signed Message:\n" + length + message
        
        
       -def verify_signature(pubkey, sig, h):
       +def verify_signature(pubkey: bytes, sig: bytes, h: bytes) -> bool:
            try:
                ECPubkey(pubkey).verify_message_hash(sig, h)
            except:
        diff --git a/electrum/lnchanannverifier.py b/electrum/lnchanannverifier.py
        @@ -0,0 +1,168 @@
       +# -*- coding: utf-8 -*-
       +#
       +# Electrum - lightweight Bitcoin client
       +# Copyright (C) 2018 The Electrum developers
       +#
       +# Permission is hereby granted, free of charge, to any person
       +# obtaining a copy of this software and associated documentation files
       +# (the "Software"), to deal in the Software without restriction,
       +# including without limitation the rights to use, copy, modify, merge,
       +# publish, distribute, sublicense, and/or sell copies of the Software,
       +# and to permit persons to whom the Software is furnished to do so,
       +# subject to the following conditions:
       +#
       +# The above copyright notice and this permission notice shall be
       +# included in all copies or substantial portions of the Software.
       +#
       +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
       +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
       +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
       +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
       +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
       +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
       +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
       +# SOFTWARE.
       +
       +import threading
       +
       +from . import lnbase
       +from . import bitcoin
       +from . import ecc
       +from .util import ThreadJob, bh2u, bfh
       +from .lnutil import invert_short_channel_id, funding_output_script_from_keys
       +from .verifier import verify_tx_is_in_block, MerkleVerificationFailure
       +from .transaction import Transaction
       +
       +
       +class LNChanAnnVerifier(ThreadJob):
       +    """ Verify channel announcements for the Channel DB """
       +
       +    def __init__(self, network, channel_db):
       +        self.network = network
       +        self.channel_db = channel_db
       +        self.lock = threading.Lock()
       +
       +        # items only removed when whole verification succeeds for them.
       +        # fixme: if it fails, it will never succeed
       +        self.started_verifying_channel = set()  # short_channel_id
       +
       +        self.unverified_channel_info = {}  # short_channel_id -> channel_info
       +
       +    def add_new_channel_info(self, channel_info):
       +        short_channel_id = channel_info.channel_id
       +        if short_channel_id in self.unverified_channel_info:
       +            return
       +        if not verify_sigs_for_channel_announcement(channel_info.msg_payload):
       +            return
       +        with self.lock:
       +            self.unverified_channel_info[short_channel_id] = channel_info
       +
       +    def get_pending_channel_info(self, short_channel_id):
       +        return self.unverified_channel_info.get(short_channel_id, None)
       +
       +    def run(self):
       +        interface = self.network.interface
       +        if not interface:
       +            return
       +
       +        blockchain = interface.blockchain
       +        if not blockchain:
       +            return
       +
       +        with self.lock:
       +            unverified_channel_info = list(self.unverified_channel_info)
       +
       +        for short_channel_id in unverified_channel_info:
       +            if short_channel_id in self.started_verifying_channel:
       +                continue
       +            block_height, tx_pos, output_idx = invert_short_channel_id(short_channel_id)
       +            # only resolve short_channel_id if headers are available.
       +            header = blockchain.read_header(block_height)
       +            if header is None:
       +                index = block_height // 2016
       +                if index < len(blockchain.checkpoints):
       +                    self.network.request_chunk(interface, index)
       +                continue
       +            callback = lambda resp, short_channel_id=short_channel_id: self.on_txid_and_merkle(resp, short_channel_id)
       +            self.network.get_txid_from_txpos(block_height, tx_pos, True,
       +                                             callback=callback)
       +            #self.print_error('requested short_channel_id', bh2u(short_channel_id))
       +            with self.lock:
       +                self.started_verifying_channel.add(short_channel_id)
       +
       +    def on_txid_and_merkle(self, response, short_channel_id):
       +        if response.get('error'):
       +            self.print_error('received an error:', response)
       +            return
       +        result = response['result']
       +        tx_hash = result['tx_hash']
       +        merkle_branch = result['merkle']
       +        block_height, tx_pos, output_idx = invert_short_channel_id(short_channel_id)
       +        header = self.network.blockchain().read_header(block_height)
       +        try:
       +            verify_tx_is_in_block(tx_hash, merkle_branch, tx_pos, header, block_height)
       +        except MerkleVerificationFailure as e:
       +            self.print_error(str(e))
       +            return
       +        callback = lambda resp, short_channel_id=short_channel_id: self.on_tx_response(resp, short_channel_id)
       +        self.network.get_transaction(tx_hash, callback=callback)
       +
       +    def on_tx_response(self, response, short_channel_id):
       +        if response.get('error'):
       +            self.print_error('received an error:', response)
       +            return
       +        params = response['params']
       +        result = response['result']
       +        tx_hash = params[0]
       +        tx = Transaction(result)
       +        try:
       +            tx.deserialize()
       +        except Exception:
       +            self.print_msg("cannot deserialize transaction, skipping", tx_hash)
       +            return
       +        if tx_hash != tx.txid():
       +            self.print_error("received tx does not match expected txid ({} != {})"
       +                             .format(tx_hash, tx.txid()))
       +            return
       +        # check funding output
       +        channel_info = self.unverified_channel_info[short_channel_id]
       +        chan_ann = channel_info.msg_payload
       +        redeem_script = funding_output_script_from_keys(chan_ann['bitcoin_key_1'], chan_ann['bitcoin_key_2'])
       +        expected_address = bitcoin.redeem_script_to_address('p2wsh', redeem_script)
       +        output_idx = invert_short_channel_id(short_channel_id)[2]
       +        try:
       +            actual_output = tx.outputs()[output_idx]
       +        except IndexError:
       +            return
       +        if expected_address != actual_output[1]:
       +            return
       +        # put channel into channel DB
       +        channel_info.set_capacity(actual_output[2])
       +        self.channel_db.add_verified_channel_info(short_channel_id, channel_info)
       +        # remove channel from unverified
       +        with self.lock:
       +            self.unverified_channel_info.pop(short_channel_id, None)
       +            try: self.started_verifying_channel.remove(short_channel_id)
       +            except KeyError: pass
       +
       +
       +def verify_sigs_for_channel_announcement(chan_ann: dict) -> bool:
       +    msg_bytes = lnbase.gen_msg('channel_announcement', **chan_ann)
       +    pre_hash = msg_bytes[2+256:]
       +    h = bitcoin.Hash(pre_hash)
       +    pubkeys = [chan_ann['node_id_1'], chan_ann['node_id_2'], chan_ann['bitcoin_key_1'], chan_ann['bitcoin_key_2']]
       +    sigs = [chan_ann['node_signature_1'], chan_ann['node_signature_2'], chan_ann['bitcoin_signature_1'], chan_ann['bitcoin_signature_2']]
       +    for pubkey, sig in zip(pubkeys, sigs):
       +        if not ecc.verify_signature(pubkey, sig, h):
       +            return False
       +    return True
       +
       +
       +def verify_sig_for_channel_update(chan_upd: dict, node_id: bytes) -> bool:
       +    msg_bytes = lnbase.gen_msg('channel_update', **chan_upd)
       +    pre_hash = msg_bytes[2+64:]
       +    h = bitcoin.Hash(pre_hash)
       +    sig = chan_upd['signature']
       +    if not ecc.verify_signature(node_id, sig, h):
       +        return False
       +    return True
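
        A hedged, stdlib-only sketch of the funding-output check done in on_tx_response
        above.  The expected output is a P2WSH wrapping the sorted 2-of-2 multisig over
        bitcoin_key_1 and bitcoin_key_2; the diff itself builds it with
        funding_output_script_from_keys and bitcoin.redeem_script_to_address, so the
        opcode layout here is only an illustration.

        import hashlib

        def two_of_two_multisig_script(pubkey1: bytes, pubkey2: bytes) -> bytes:
            # lexicographic ordering; for 33-byte compressed keys this matches
            # the hex-string sort used by funding_output_script_from_keys
            pk1, pk2 = sorted([pubkey1, pubkey2])
            return (b'\x52'                              # OP_2
                    + bytes([len(pk1)]) + pk1            # push bitcoin_key (33 bytes)
                    + bytes([len(pk2)]) + pk2
                    + b'\x52'                            # OP_2
                    + b'\xae')                           # OP_CHECKMULTISIG

        def p2wsh_witness_program(witness_script: bytes) -> bytes:
            # the scriptPubKey is OP_0 <this 32-byte hash>; the p2wsh address encodes it
            return hashlib.sha256(witness_script).digest()
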
        diff --git a/electrum/lnrouter.py b/electrum/lnrouter.py
        @@ -30,8 +30,11 @@ import sys
        import binascii
        import hashlib
        import hmac
       +import os
       +import json
       +import threading
        from collections import namedtuple, defaultdict
       -from typing import Sequence, Union, Tuple
       +from typing import Sequence, Union, Tuple, Optional
        
        from cryptography.hazmat.primitives.ciphers import Cipher, algorithms
        from cryptography.hazmat.backends import default_backend
        @@ -39,9 +42,12 @@ from cryptography.hazmat.backends import default_backend
        from . import bitcoin
        from . import ecc
        from . import crypto
       +from . import constants
        from .crypto import sha256
       -from .util import PrintError, bh2u, profiler, xor_bytes
       +from .util import PrintError, bh2u, profiler, xor_bytes, get_headers_dir, bfh
        from .lnutil import get_ecdh
       +from .storage import JsonDB
       +from .lnchanannverifier import LNChanAnnVerifier, verify_sig_for_channel_update
        
        
        class ChannelInfo(PrintError):
        @@ -54,23 +60,71 @@ class ChannelInfo(PrintError):
                assert type(self.node_id_2) is bytes
                assert list(sorted([self.node_id_1, self.node_id_2])) == [self.node_id_1, self.node_id_2]
        
       +        self.features_len = channel_announcement_payload['len']
       +        self.features = channel_announcement_payload['features']
       +        self.bitcoin_key_1 = channel_announcement_payload['bitcoin_key_1']
       +        self.bitcoin_key_2 = channel_announcement_payload['bitcoin_key_2']
       +
       +        # this field does not get persisted
       +        self.msg_payload = channel_announcement_payload
       +
                self.capacity_sat = None
                self.policy_node1 = None
                self.policy_node2 = None
        
       +    def to_json(self) -> dict:
       +        d = {}
       +        d['short_channel_id'] = bh2u(self.channel_id)
       +        d['node_id_1'] = bh2u(self.node_id_1)
       +        d['node_id_2'] = bh2u(self.node_id_2)
       +        d['len'] = bh2u(self.features_len)
       +        d['features'] = bh2u(self.features)
       +        d['bitcoin_key_1'] = bh2u(self.bitcoin_key_1)
       +        d['bitcoin_key_2'] = bh2u(self.bitcoin_key_2)
       +        d['policy_node1'] = self.policy_node1
       +        d['policy_node2'] = self.policy_node2
       +        d['capacity_sat'] = self.capacity_sat
       +        return d
       +
       +    @classmethod
       +    def from_json(cls, d: dict):
       +        d2 = {}
       +        d2['short_channel_id'] = bfh(d['short_channel_id'])
       +        d2['node_id_1'] = bfh(d['node_id_1'])
       +        d2['node_id_2'] = bfh(d['node_id_2'])
       +        d2['len'] = bfh(d['len'])
       +        d2['features'] = bfh(d['features'])
       +        d2['bitcoin_key_1'] = bfh(d['bitcoin_key_1'])
       +        d2['bitcoin_key_2'] = bfh(d['bitcoin_key_2'])
       +        ci = ChannelInfo(d2)
       +        ci.capacity_sat = d['capacity_sat']
       +        ci.policy_node1 = ChannelInfoDirectedPolicy.from_json(d['policy_node1'])
       +        ci.policy_node2 = ChannelInfoDirectedPolicy.from_json(d['policy_node2'])
       +        return ci
       +
            def set_capacity(self, capacity):
       -        # TODO call this after looking up UTXO for funding txn on chain
                self.capacity_sat = capacity
        
            def on_channel_update(self, msg_payload):
                assert self.channel_id == msg_payload['short_channel_id']
                flags = int.from_bytes(msg_payload['flags'], 'big')
                direction = flags & 1
       +        new_policy = ChannelInfoDirectedPolicy(msg_payload)
       +        if direction == 0:
       +            old_policy = self.policy_node1
       +            node_id = self.node_id_1
       +        else:
       +            old_policy = self.policy_node2
       +            node_id = self.node_id_2
       +        if old_policy and old_policy.timestamp >= new_policy.timestamp:
       +            return  # ignore
       +        if not verify_sig_for_channel_update(msg_payload, node_id):
       +            return  # ignore
       +        # save new policy
                if direction == 0:
       -            self.policy_node1 = ChannelInfoDirectedPolicy(msg_payload)
       +            self.policy_node1 = new_policy
                else:
       -            self.policy_node2 = ChannelInfoDirectedPolicy(msg_payload)
       -        #self.print_error('channel update', binascii.hexlify(self.channel_id).decode("ascii"), flags)
       +            self.policy_node2 = new_policy
        
            def get_policy_for_node(self, node_id):
                if node_id == self.node_id_1:
        @@ -84,51 +138,121 @@ class ChannelInfo(PrintError):
        class ChannelInfoDirectedPolicy:
        
            def __init__(self, channel_update_payload):
       -        self.cltv_expiry_delta           = channel_update_payload['cltv_expiry_delta']
       -        self.htlc_minimum_msat           = channel_update_payload['htlc_minimum_msat']
       -        self.fee_base_msat               = channel_update_payload['fee_base_msat']
       -        self.fee_proportional_millionths = channel_update_payload['fee_proportional_millionths']
       -        self.cltv_expiry_delta = int.from_bytes(self.cltv_expiry_delta, "big")
       -        self.htlc_minimum_msat = int.from_bytes(self.htlc_minimum_msat, "big")
       -        self.fee_base_msat = int.from_bytes(self.fee_base_msat, "big")
       -        self.fee_proportional_millionths = int.from_bytes(self.fee_proportional_millionths, "big")
       +        cltv_expiry_delta           = channel_update_payload['cltv_expiry_delta']
       +        htlc_minimum_msat           = channel_update_payload['htlc_minimum_msat']
       +        fee_base_msat               = channel_update_payload['fee_base_msat']
       +        fee_proportional_millionths = channel_update_payload['fee_proportional_millionths']
       +        flags                       = channel_update_payload['flags']
       +        timestamp                   = channel_update_payload['timestamp']
       +
       +        self.cltv_expiry_delta           = int.from_bytes(cltv_expiry_delta, "big")
       +        self.htlc_minimum_msat           = int.from_bytes(htlc_minimum_msat, "big")
       +        self.fee_base_msat               = int.from_bytes(fee_base_msat, "big")
       +        self.fee_proportional_millionths = int.from_bytes(fee_proportional_millionths, "big")
       +        self.flags                       = int.from_bytes(flags, "big")
       +        self.timestamp                   = int.from_bytes(timestamp, "big")
       +
       +    def to_json(self) -> dict:
       +        d = {}
       +        d['cltv_expiry_delta'] = self.cltv_expiry_delta
       +        d['htlc_minimum_msat'] = self.htlc_minimum_msat
       +        d['fee_base_msat'] = self.fee_base_msat
       +        d['fee_proportional_millionths'] = self.fee_proportional_millionths
       +        d['flags'] = self.flags
       +        d['timestamp'] = self.timestamp
       +        return d
        
       +    @classmethod
       +    def from_json(cls, d: dict):
       +        if d is None: return None
       +        d2 = {}
       +        d2['cltv_expiry_delta'] = d['cltv_expiry_delta'].to_bytes(2, "big")
       +        d2['htlc_minimum_msat'] = d['htlc_minimum_msat'].to_bytes(8, "big")
       +        d2['fee_base_msat'] = d['fee_base_msat'].to_bytes(4, "big")
       +        d2['fee_proportional_millionths'] = d['fee_proportional_millionths'].to_bytes(4, "big")
       +        d2['flags'] = d['flags'].to_bytes(2, "big")
       +        d2['timestamp'] = d['timestamp'].to_bytes(4, "big")
       +        return ChannelInfoDirectedPolicy(d2)
       +
       +
       +class ChannelDB(JsonDB):
        
       -class ChannelDB(PrintError):
       +    def __init__(self, network):
       +        self.network = network
        
       -    def __init__(self):
       +        path = os.path.join(get_headers_dir(network.config), 'channel_db')
       +        JsonDB.__init__(self, path)
       +
       +        self.lock = threading.Lock()
                self._id_to_channel_info = {}
                self._channels_for_node = defaultdict(set)  # node -> set(short_channel_id)
        
       +        self.ca_verifier = LNChanAnnVerifier(network, self)
       +        self.network.add_jobs([self.ca_verifier])
       +
       +        self.load_data()
       +
       +    def load_data(self):
       +        if os.path.exists(self.path):
       +            with open(self.path, "r", encoding='utf-8') as f:
       +                raw = f.read()
       +                self.data = json.loads(raw)
       +        channel_infos = self.get('channel_infos', {})
       +        for short_channel_id, channel_info_d in channel_infos.items():
       +            channel_info = ChannelInfo.from_json(channel_info_d)
       +            short_channel_id = bfh(short_channel_id)
       +            self.add_verified_channel_info(short_channel_id, channel_info)
       +
       +    def save_data(self):
       +        with self.lock:
       +            channel_infos = {}
       +            for short_channel_id, channel_info in self._id_to_channel_info.items():
       +                channel_infos[bh2u(short_channel_id)] = channel_info
       +            self.put('channel_infos', channel_infos)
       +        self.write()
       +
            def __len__(self):
                return len(self._id_to_channel_info)
        
       -    def get_channel_info(self, channel_id):
       +    def get_channel_info(self, channel_id) -> Optional[ChannelInfo]:
                return self._id_to_channel_info.get(channel_id, None)
        
            def get_channels_for_node(self, node_id):
                """Returns the set of channels that have node_id as one of the endpoints."""
                return self._channels_for_node[node_id]
        
       +    def add_verified_channel_info(self, short_channel_id: bytes, channel_info: ChannelInfo):
       +        with self.lock:
       +            self._id_to_channel_info[short_channel_id] = channel_info
       +            self._channels_for_node[channel_info.node_id_1].add(short_channel_id)
       +            self._channels_for_node[channel_info.node_id_2].add(short_channel_id)
       +
            def on_channel_announcement(self, msg_payload):
                short_channel_id = msg_payload['short_channel_id']
       -        #self.print_error('channel announcement', binascii.hexlify(short_channel_id).decode("ascii"))
       -        channel_info = ChannelInfo(msg_payload)
                if short_channel_id in self._id_to_channel_info:
       -            self.print_error("IGNORING CHANNEL ANNOUNCEMENT, WE ALREADY KNOW THIS CHANNEL")
                    return
       -        self._id_to_channel_info[short_channel_id] = channel_info
       -        self._channels_for_node[channel_info.node_id_1].add(short_channel_id)
       -        self._channels_for_node[channel_info.node_id_2].add(short_channel_id)
       +        if constants.net.rev_genesis_bytes() != msg_payload['chain_hash']:
       +            return
       +        channel_info = ChannelInfo(msg_payload)
       +        self.ca_verifier.add_new_channel_info(channel_info)
        
            def on_channel_update(self, msg_payload):
                short_channel_id = msg_payload['short_channel_id']
       -        try:
       -            channel_info = self._id_to_channel_info[short_channel_id]
       -        except KeyError:
       +        if constants.net.rev_genesis_bytes() != msg_payload['chain_hash']:
       +            return
       +        # try finding channel in verified db
       +        channel_info = self._id_to_channel_info.get(short_channel_id, None)
       +        if channel_info is None:
       +            # try finding channel in pending db
       +            channel_info = self.ca_verifier.get_pending_channel_info(short_channel_id)
       +        if channel_info is None:
       +            # try finding channel in verified db, again
       +            # (maybe this is redundant but this should prevent a race..)
       +            channel_info = self._id_to_channel_info.get(short_channel_id, None)
       +        if channel_info is None:
                    self.print_error("could not find", short_channel_id)
       -        else:
       -            channel_info.on_channel_update(msg_payload)
       +            return
       +        channel_info.on_channel_update(msg_payload)
        
            def remove_channel(self, short_channel_id):
                try:
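
        For orientation, an illustrative example (made-up values; field names follow the
        to_json methods above) of what ChannelDB.save_data writes to the channel_db
        file: ChannelInfo objects keyed by hex short_channel_id under 'channel_infos',
        serialized through MyEncoder's to_json fallback (see util.py below).

        example_channel_db = {
            "channel_infos": {
                "0186a00007120001": {                    # hypothetical short_channel_id
                    "short_channel_id": "0186a00007120001",
                    "node_id_1": "02" + "aa" * 32,       # placeholder pubkeys
                    "node_id_2": "03" + "bb" * 32,
                    "len": "0000",
                    "features": "",
                    "bitcoin_key_1": "02" + "cc" * 32,
                    "bitcoin_key_2": "03" + "dd" * 32,
                    "policy_node1": {
                        "cltv_expiry_delta": 10,
                        "htlc_minimum_msat": 250,
                        "fee_base_msat": 100,
                        "fee_proportional_millionths": 150,
                        "flags": 0,
                        "timestamp": 1532372984,
                    },
                    "policy_node2": None,
                    "capacity_sat": 100000,
                }
            }
        }
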
        diff --git a/electrum/lnutil.py b/electrum/lnutil.py
        @@ -344,8 +344,11 @@ def sign_and_get_sig_string(tx, local_config, remote_config):
            sig_64 = sig_string_from_der_sig(sig[:-1])
            return sig_64
        
       -def funding_output_script(local_config, remote_config):
       -    pubkeys = sorted([bh2u(local_config.multisig_key.pubkey), bh2u(remote_config.multisig_key.pubkey)])
       +def funding_output_script(local_config, remote_config) -> str:
       +    return funding_output_script_from_keys(local_config.multisig_key.pubkey, remote_config.multisig_key.pubkey)
       +
       +def funding_output_script_from_keys(pubkey1: bytes, pubkey2: bytes) -> str:
       +    pubkeys = sorted([bh2u(pubkey1), bh2u(pubkey2)])
            return transaction.multisig_script(pubkeys, 2)
        
        def calc_short_channel_id(block_height: int, tx_pos_in_block: int, output_index: int) -> bytes:
        @@ -354,6 +357,12 @@ def calc_short_channel_id(block_height: int, tx_pos_in_block: int, output_index:
            oi = output_index.to_bytes(2, byteorder='big')
            return bh + tpos + oi
        
       +def invert_short_channel_id(short_channel_id: bytes) -> (int, int, int):
       +    bh = int.from_bytes(short_channel_id[:3], byteorder='big')
       +    tpos = int.from_bytes(short_channel_id[3:6], byteorder='big')
       +    oi = int.from_bytes(short_channel_id[6:8], byteorder='big')
       +    return bh, tpos, oi
       +
        def get_obscured_ctn(ctn, local, remote):
            mask = int.from_bytes(sha256(local + remote)[-6:], 'big')
            return ctn ^ mask
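
        A quick round-trip sketch (standalone, stdlib only) of the new
        invert_short_channel_id helper against calc_short_channel_id: 3 bytes block
        height, 3 bytes transaction position, 2 bytes output index, all big-endian.

        def calc_short_channel_id(block_height: int, tx_pos_in_block: int, output_index: int) -> bytes:
            return (block_height.to_bytes(3, 'big')
                    + tx_pos_in_block.to_bytes(3, 'big')
                    + output_index.to_bytes(2, 'big'))

        def invert_short_channel_id(short_channel_id: bytes) -> (int, int, int):
            bh = int.from_bytes(short_channel_id[:3], byteorder='big')
            tpos = int.from_bytes(short_channel_id[3:6], byteorder='big')
            oi = int.from_bytes(short_channel_id[6:8], byteorder='big')
            return bh, tpos, oi

        # encode then decode gives back the original triple
        assert invert_short_channel_id(calc_short_channel_id(497000, 1823, 1)) == (497000, 1823, 1)
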
        diff --git a/electrum/network.py b/electrum/network.py
        @@ -301,7 +301,7 @@ class Network(Logger):
        
                # lightning network
                self.lightning_nodes = {}
       -        self.channel_db = lnrouter.ChannelDB()
       +        self.channel_db = lnrouter.ChannelDB(self)
                self.path_finder = lnrouter.LNPathFinder(self.channel_db)
                self.lnwatcher = lnwatcher.LNWatcher(self)
        
        @@ -1183,6 +1183,7 @@
            def stop(self):
                assert self._loop_thread != threading.current_thread(), 'must not be called from network thread'
                fut = asyncio.run_coroutine_threadsafe(self._stop(full_shutdown=True), self.asyncio_loop)
       +        self.channel_db.save_data()
                try:
                    fut.result(timeout=2)
                except (asyncio.TimeoutError, asyncio.CancelledError): pass
        diff --git a/electrum/tests/test_lnrouter.py b/electrum/tests/test_lnrouter.py
        @@ -1,11 +1,17 @@
        import unittest
       +import tempfile
       +import shutil
        
        from electrum.util import bh2u, bfh
        from electrum.lnbase import Peer
        from electrum.lnrouter import OnionHopsDataSingle, new_onion_packet, OnionPerHop
        from electrum import bitcoin, lnrouter
       +from electrum.simple_config import SimpleConfig
       +from electrum import lnchanannverifier
        
       -class Test_LNRouter(unittest.TestCase):
       +from . import TestCaseForTestnet
       +
       +class Test_LNRouter(TestCaseForTestnet):
        
            #@staticmethod
            #def parse_witness_list(witness_bytes):
        @@ -21,12 +27,26 @@ class Test_LNRouter(unittest.TestCase):
            #    assert witness_bytes == b"", witness_bytes
            #    return res
        
       +    @classmethod
       +    def setUpClass(cls):
       +        super().setUpClass()
       +        cls.electrum_path = tempfile.mkdtemp()
       +        cls.config = SimpleConfig({'electrum_path': cls.electrum_path})
        
       +    @classmethod
       +    def tearDownClass(cls):
       +        super().tearDownClass()
       +        shutil.rmtree(cls.electrum_path)
        
            def test_find_path_for_payment(self):
                class fake_network:
       -            channel_db = lnrouter.ChannelDB()
       +            config = self.config
                    trigger_callback = lambda x: None
       +            add_jobs = lambda *args: None
       +        fake_network.channel_db = lnrouter.ChannelDB(fake_network())
       +        def no_verify_add_new_channel_info(channel_info):
       +            fake_network.channel_db.add_verified_channel_info(channel_info.channel_id, channel_info)
       +        fake_network.channel_db.ca_verifier.add_new_channel_info = no_verify_add_new_channel_info
                class fake_ln_worker:
                    path_finder = lnrouter.LNPathFinder(fake_network.channel_db)
                    privkey = bitcoin.sha256('privkeyseed')
        @@ -34,27 +54,39 @@
                    channels = []
                    invoices = {}
                    channels_for_peer = lambda x: []
       -        p = Peer(fake_ln_worker, '', 0, 'a')
       -        p.on_channel_announcement({'node_id_1': b'b', 'node_id_2': b'c', 'short_channel_id': bfh('0000000000000001')})
       -        p.on_channel_announcement({'node_id_1': b'b', 'node_id_2': b'e', 'short_channel_id': bfh('0000000000000002')})
       -        p.on_channel_announcement({'node_id_1': b'a', 'node_id_2': b'b', 'short_channel_id': bfh('0000000000000003')})
       -        p.on_channel_announcement({'node_id_1': b'c', 'node_id_2': b'd', 'short_channel_id': bfh('0000000000000004')})
       -        p.on_channel_announcement({'node_id_1': b'd', 'node_id_2': b'e', 'short_channel_id': bfh('0000000000000005')})
       -        p.on_channel_announcement({'node_id_1': b'a', 'node_id_2': b'd', 'short_channel_id': bfh('0000000000000006')})
       +        p = Peer(fake_ln_worker, '', 0, b'\x02aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
       +        p.on_channel_announcement({'node_id_1': b'\x02bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
       +                                   'node_id_2': b'\x02cccccccccccccccccccccccccccccccc',
       +                                   'short_channel_id': bfh('0000000000000001'), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_announcement({'node_id_1': b'\x02bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
       +                                   'node_id_2': b'\x02eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee',
       +                                   'short_channel_id': bfh('0000000000000002'), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_announcement({'node_id_1': b'\x02aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
       +                                   'node_id_2': b'\x02bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
       +                                   'short_channel_id': bfh('0000000000000003'), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_announcement({'node_id_1': b'\x02cccccccccccccccccccccccccccccccc',
       +                                   'node_id_2': b'\x02dddddddddddddddddddddddddddddddd',
       +                                   'short_channel_id': bfh('0000000000000004'), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_announcement({'node_id_1': b'\x02dddddddddddddddddddddddddddddddd',
       +                                   'node_id_2': b'\x02eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee',
       +                                   'short_channel_id': bfh('0000000000000005'), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_announcement({'node_id_1': b'\x02aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
       +                                   'node_id_2': b'\x02dddddddddddddddddddddddddddddddd',
       +                                   'short_channel_id': bfh('0000000000000006'), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
                o = lambda i: i.to_bytes(8, "big")
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000001'), 'flags': b'\x00', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150)})
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000001'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150)})
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000002'), 'flags': b'\x00', 'cltv_expiry_delta': o(99), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150)})
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000002'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150)})
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000003'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150)})
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000003'), 'flags': b'\x00', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150)})
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000004'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150)})
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000004'), 'flags': b'\x00', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150)})
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000005'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150)})
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000005'), 'flags': b'\x00', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(999)})
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000006'), 'flags': b'\x00', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(99999999)})
       -        p.on_channel_update({'short_channel_id': bfh('0000000000000006'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150)})
       -        self.assertNotEqual(None, fake_ln_worker.path_finder.find_path_for_payment(b'a', b'e', 100000))
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000001'), 'flags': b'\x00', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000001'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000002'), 'flags': b'\x00', 'cltv_expiry_delta': o(99), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000002'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000003'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000003'), 'flags': b'\x00', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000004'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000004'), 'flags': b'\x00', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000005'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000005'), 'flags': b'\x00', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(999), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000006'), 'flags': b'\x00', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(99999999), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        p.on_channel_update({'short_channel_id': bfh('0000000000000006'), 'flags': b'\x01', 'cltv_expiry_delta': o(10), 'htlc_minimum_msat': o(250), 'fee_base_msat': o(100), 'fee_proportional_millionths': o(150), 'chain_hash': bfh('43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000')})
       +        self.assertNotEqual(None, fake_ln_worker.path_finder.find_path_for_payment(b'\x02aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', b'\x02eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee', 100000))
        
        
        
        diff --git a/electrum/util.py b/electrum/util.py
        @@ -217,7 +217,9 @@ class MyEncoder(json.JSONEncoder):
                    return obj.isoformat(' ')[:-3]
                if isinstance(obj, set):
                    return list(obj)
       -        return super().default(obj)
       +        if hasattr(obj, 'to_json') and callable(obj.to_json):
       +            return obj.to_json()
       +        return super(MyEncoder, self).default(obj)
        
        
        class ThreadJob(Logger):