1from collections import OrderedDict
2from datetime import datetime
3from fixtures import *  # noqa: F401,F403
4from flaky import flaky  # noqa: F401
5from hashlib import sha256
6from pyln.client import RpcError, Millisatoshi
7from pyln.proto import Invoice
8from utils import (
9    DEVELOPER, only_one, sync_blockheight, TIMEOUT, wait_for, TEST_NETWORK,
10    DEPRECATED_APIS, expected_peer_features, expected_node_features,
11    expected_channel_features, account_balance,
12    check_coin_moves, first_channel_id, check_coin_moves_idx,
13    EXPERIMENTAL_FEATURES, EXPERIMENTAL_DUAL_FUND
14)
15
16import ast
17import json
18import os
19import pytest
20import random
21import re
22import signal
23import sqlite3
24import stat
25import subprocess
26import time
27import unittest
28
29
def test_option_passthrough(node_factory, directory):
    """ Ensure that registering options works.

    First attempts without the plugin and then with the plugin.
    """
    plugin_path = os.path.join(os.getcwd(), 'contrib/plugins/helloworld.py')
    base_cmd = [
        'lightningd/lightningd',
        '--lightning-dir={}'.format(directory),
    ]

    # Without the plugin the '--greeting' option must not exist.
    help_out = subprocess.check_output(base_cmd + ['--help']).decode('utf-8')
    assert '--greeting' not in help_out

    # With the plugin loaded, its registered option shows up in --help.
    help_out = subprocess.check_output(
        base_cmd + ['--plugin={}'.format(plugin_path), '--help']
    ).decode('utf-8')
    assert '--greeting' in help_out

    # Now try to see if it gets accepted, would fail to start if the
    # option didn't exist
    n = node_factory.get_node(options={'plugin': plugin_path, 'greeting': 'Ciao'})
    n.stop()
56
57
def test_option_types(node_factory):
    """Ensure that desired types of options are
       respected in output """

    plugin_path = os.path.join(os.getcwd(), 'tests/plugins/options.py')

    # Well-typed values are echoed back with their python types.
    node = node_factory.get_node(options={
        'plugin': plugin_path,
        'str_opt': 'ok',
        'int_opt': 22,
        'bool_opt': True,
    })

    for needle in [r"option str_opt ok <class 'str'>",
                   r"option int_opt 22 <class 'int'>",
                   r"option bool_opt True <class 'bool'>"]:
        assert node.daemon.is_in_log(needle)
    # flag options aren't passed through if not flagged on
    assert not node.daemon.is_in_log(r"option flag_opt")
    node.stop()

    # A flag option given with no value (None) should come through as True.
    node = node_factory.get_node(options={
        'plugin': plugin_path, 'str_opt': 'ok',
        'int_opt': 22,
        'bool_opt': 'true',
        'flag_opt': None,
    })

    assert node.daemon.is_in_log(r"option bool_opt True <class 'bool'>")
    assert node.daemon.is_in_log(r"option flag_opt True <class 'bool'>")
    node.stop()

    # A bad bool-option must prevent startup with a stderr message.
    node = node_factory.get_node(options={
        'plugin': plugin_path,
        'str_opt': 'ok',
        'int_opt': 22,
        'bool_opt': '!',
    }, expect_fail=True, may_fail=True)

    assert not node.daemon.running
    assert node.daemon.is_in_stderr('bool_opt: ! does not parse as type bool')

    # Same for a bad int-option.
    node = node_factory.get_node(options={
        'plugin': plugin_path,
        'str_opt': 'ok',
        'int_opt': 'notok',
        'bool_opt': 1,
    }, may_fail=True, expect_fail=True)

    assert not node.daemon.running
    assert node.daemon.is_in_stderr('--int_opt: notok does not parse as type int')

    # Flag opts shouldn't allow any input
    node = node_factory.get_node(options={
        'plugin': plugin_path,
        'str_opt': 'ok',
        'int_opt': 11,
        'bool_opt': 1,
        'flag_opt': True,
    }, may_fail=True, expect_fail=True)

    assert not node.daemon.running
    assert node.daemon.is_in_stderr("--flag_opt: doesn't allow an argument")

    # Multi-value options arrive as python lists.
    node = node_factory.get_node(options={
        'plugin': plugin_path,
        'str_optm': ['ok', 'ok2'],
        'int_optm': [11, 12, 13],
    })

    assert node.daemon.is_in_log(r"option str_optm \['ok', 'ok2'\] <class 'list'>")
    assert node.daemon.is_in_log(r"option int_optm \[11, 12, 13\] <class 'list'>")
    node.stop()
135
136
def test_millisatoshi_passthrough(node_factory):
    """ Ensure that Millisatoshi arguments and return work.

    The millisatoshis.py plugin echoes back the first argument; the
    pyln client must round-trip it as a Millisatoshi, not a plain str.
    """
    plugin_path = os.path.join(os.getcwd(), 'tests/plugins/millisatoshis.py')
    n = node_factory.get_node(options={'plugin': plugin_path, 'log-level': 'io'})

    # By keyword
    ret = n.rpc.call('echo', {'msat': Millisatoshi(17), 'not_an_msat': '22msat'})['echo_msat']
    # isinstance is the idiomatic type check (was: type(ret) == Millisatoshi)
    assert isinstance(ret, Millisatoshi)
    assert ret == Millisatoshi(17)

    # By position
    ret = n.rpc.call('echo', [Millisatoshi(18), '22msat'])['echo_msat']
    assert isinstance(ret, Millisatoshi)
    assert ret == Millisatoshi(18)
152
153
def test_rpc_passthrough(node_factory):
    """Starting with a plugin exposes its RPC methods.

    First check that the RPC method appears in the help output and
    then try to call it.

    """
    plugin_path = os.path.join(os.getcwd(), 'contrib/plugins/helloworld.py')
    n = node_factory.get_node(options={'plugin': plugin_path, 'greeting': 'Ciao'})

    # The 'hello' command registered by helloworld.py must be listed
    # exactly once in the help output.
    matches = [entry for entry in n.rpc.help()['help'] if 'hello' in entry['command']]
    assert len(matches) == 1

    # Make sure usage message is present.
    assert only_one(n.rpc.help('hello')['help'])['command'] == 'hello [name]'
    # While we're at it, let's check that helloworld.py is logging
    # correctly via the notifications plugin->lightningd
    assert n.daemon.is_in_log('Plugin helloworld.py initialized')

    # Calling the passthrough method returns the configured greeting.
    greeting = n.rpc.hello(name='World')
    assert greeting == "Ciao World"
    with pytest.raises(RpcError):
        n.rpc.fail()

    # Try to call a method without enough arguments
    with pytest.raises(RpcError, match="processing bye: missing a required"
                                       " argument"):
        n.rpc.bye()
185
186
def test_plugin_dir(node_factory):
    """--plugin-dir works"""
    plugins = os.path.join(os.getcwd(), 'contrib/plugins')
    # Would fail to start if --plugin-dir didn't load helloworld.py,
    # since 'greeting' is an option that plugin registers.
    node_factory.get_node(options={'plugin-dir': plugins, 'greeting': 'Mars'})
191
192
def test_plugin_slowinit(node_factory):
    """Tests that the 'plugin' RPC command times out if plugin doesnt respond"""
    # SLOWINIT_TIME tells slow_init.py how long to stall before answering
    # 'init'.  Restore the environment afterwards so the setting cannot
    # leak into other tests running in the same process.
    saved = os.environ.get('SLOWINIT_TIME')
    os.environ['SLOWINIT_TIME'] = '61'
    try:
        n = node_factory.get_node()

        with pytest.raises(RpcError, match=': timed out before replying to init'):
            n.rpc.plugin_start(os.path.join(os.getcwd(), "tests/plugins/slow_init.py"))

        # It's not actually configured yet, see what happens;
        # make sure 'rescan' and 'list' controls dont crash
        n.rpc.plugin_rescan()
        n.rpc.plugin_list()
    finally:
        if saved is None:
            del os.environ['SLOWINIT_TIME']
        else:
            os.environ['SLOWINIT_TIME'] = saved
205
206
def test_plugin_command(node_factory):
    """Tests the 'plugin' RPC command.

    Exercises dynamic plugin management end to end: startdir, start,
    stop, rescan and list, plus the error paths for non-dynamic plugins,
    broken plugins and plugins that never answer getmanifest.
    """
    n = node_factory.get_node()

    # Make sure that the 'hello' command from the helloworld.py plugin
    # is not available.
    cmd = [hlp for hlp in n.rpc.help()["help"] if "hello" in hlp["command"]]
    assert(len(cmd) == 0)

    # Add the 'contrib/plugins' test dir
    n.rpc.plugin_startdir(directory=os.path.join(os.getcwd(), "contrib/plugins"))
    # Make sure that the 'hello' command from the helloworld.py plugin
    # is now available.
    cmd = [hlp for hlp in n.rpc.help()["help"] if "hello" in hlp["command"]]
    assert(len(cmd) == 1)

    # Make sure 'rescan' and 'list' subcommands dont crash
    n.rpc.plugin_rescan()
    n.rpc.plugin_list()

    # Make sure the plugin behaves normally after stop and restart
    assert("Successfully stopped helloworld.py."
           == n.rpc.plugin_stop(plugin="helloworld.py")["result"])
    n.daemon.wait_for_log(r"Killing plugin: stopped by lightningd via RPC")
    n.rpc.plugin_start(plugin=os.path.join(os.getcwd(), "contrib/plugins/helloworld.py"))
    n.daemon.wait_for_log(r"Plugin helloworld.py initialized")
    assert("Hello world" == n.rpc.call(method="hello"))

    # Now stop the helloworld plugin
    assert("Successfully stopped helloworld.py."
           == n.rpc.plugin_stop(plugin="helloworld.py")["result"])
    n.daemon.wait_for_log(r"Killing plugin: stopped by lightningd via RPC")
    # Make sure that the 'hello' command from the helloworld.py plugin
    # is not available anymore.
    cmd = [hlp for hlp in n.rpc.help()["help"] if "hello" in hlp["command"]]
    assert(len(cmd) == 0)

    # Test that we cannot start a plugin with 'dynamic' set to False in
    # getmanifest
    with pytest.raises(RpcError, match=r"Not a dynamic plugin"):
        n.rpc.plugin_start(plugin=os.path.join(os.getcwd(), "tests/plugins/static.py"))

    # Test that we cannot stop a started plugin with 'dynamic' flag set to
    # False
    n2 = node_factory.get_node(options={
        "plugin": os.path.join(os.getcwd(), "tests/plugins/static.py")
    })
    with pytest.raises(RpcError, match=r"static.py cannot be managed when lightningd is up"):
        n2.rpc.plugin_stop(plugin="static.py")

    # Test that we don't crash when starting a broken plugin
    with pytest.raises(RpcError, match=r": exited before replying to getmanifest"):
        n2.rpc.plugin_start(plugin=os.path.join(os.getcwd(), "tests/plugins/broken.py"))

    # A plugin that stalls on getmanifest must be timed out, not hang us.
    with pytest.raises(RpcError, match=r': timed out before replying to getmanifest'):
        n2.rpc.plugin_start(os.path.join(os.getcwd(), 'contrib/plugins/fail/failtimeout.py'))

    # Test that we can add a directory with more than one new plugin in it.
    try:
        n.rpc.plugin_startdir(os.path.join(os.getcwd(), "contrib/plugins"))
    except RpcError:
        pass

    # Usually, it crashes after the above return.
    n.rpc.stop()
272
273
def test_plugin_disable(node_factory):
    """--disable-plugin works.

    OrderedDict is used deliberately: the test checks that disabling
    works no matter in which order 'disable-plugin' appears relative to
    'plugin'/'plugin-dir' on the command line.
    """
    plugin_dir = os.path.join(os.getcwd(), 'contrib/plugins')
    # We used to need plugin-dir before disable-plugin!
    n = node_factory.get_node(options=OrderedDict([('plugin-dir', plugin_dir),
                                                   ('disable-plugin',
                                                    '{}/helloworld.py'
                                                    .format(plugin_dir))]))
    with pytest.raises(RpcError):
        n.rpc.hello(name='Sun')
    assert n.daemon.is_in_log('helloworld.py: disabled via disable-plugin')

    # Also works by basename.
    n = node_factory.get_node(options=OrderedDict([('plugin-dir', plugin_dir),
                                                   ('disable-plugin',
                                                    'helloworld.py')]))
    with pytest.raises(RpcError):
        n.rpc.hello(name='Sun')
    assert n.daemon.is_in_log('helloworld.py: disabled via disable-plugin')

    # Other order also works!
    n = node_factory.get_node(options=OrderedDict([('disable-plugin',
                                                    'helloworld.py'),
                                                   ('plugin-dir', plugin_dir)]))
    with pytest.raises(RpcError):
        n.rpc.hello(name='Sun')
    assert n.daemon.is_in_log('helloworld.py: disabled via disable-plugin')

    # Both orders of explicit specification work.
    n = node_factory.get_node(options=OrderedDict([('disable-plugin',
                                                    'helloworld.py'),
                                                   ('plugin',
                                                    '{}/helloworld.py'
                                                    .format(plugin_dir))]))
    with pytest.raises(RpcError):
        n.rpc.hello(name='Sun')
    assert n.daemon.is_in_log('helloworld.py: disabled via disable-plugin')

    # Both orders of explicit specification work.
    n = node_factory.get_node(options=OrderedDict([('plugin',
                                                    '{}/helloworld.py'
                                                    .format(plugin_dir)),
                                                   ('disable-plugin',
                                                    'helloworld.py')]))
    with pytest.raises(RpcError):
        n.rpc.hello(name='Sun')
    assert n.daemon.is_in_log('helloworld.py: disabled via disable-plugin')

    # Still disabled if we load directory.
    n.rpc.plugin_startdir(directory=os.path.join(os.getcwd(), "contrib/plugins"))
    n.daemon.wait_for_log('helloworld.py: disabled via disable-plugin')

    # Check that list works
    n = node_factory.get_node(options={'disable-plugin':
                                       ['something-else.py', 'helloworld.py']})

    assert n.rpc.listconfigs()['disable-plugin'] == ['something-else.py', 'helloworld.py']
331
332
def test_plugin_hook(node_factory, executor):
    """The helloworld plugin registers a htlc_accepted hook.

    The hook will sleep for a few seconds and log a
    message. `lightningd` should wait for the response and only then
    complete the payment.

    """
    plugin = os.path.join(os.getcwd(), 'contrib/plugins/helloworld.py')
    l1, l2 = node_factory.line_graph(2, opts={'plugin': plugin})
    started = time.time()
    payment = executor.submit(l1.pay, l2, 100000)
    l2.daemon.wait_for_log(r'on_htlc_accepted called')

    # The hook will sleep for 20 seconds before answering, so the
    # payment future should take at least that long to resolve.
    payment.result()
    finished = time.time()
    assert finished >= started + 20
351
352
def test_plugin_connect_notifications(node_factory):
    """ test 'connect' and 'disconnect' notifications
    """
    plugin = os.path.join(os.getcwd(), 'contrib/plugins/helloworld.py')
    l1, l2 = node_factory.get_nodes(2, opts={'plugin': plugin})

    # Connecting fires a 'connect' notification on both peers.
    l1.connect(l2)
    for node in (l1, l2):
        node.daemon.wait_for_log(r'Received connect event')

    # Disconnecting fires a 'disconnect' notification on both peers.
    l2.rpc.disconnect(l1.info['id'])
    for node in (l1, l2):
        node.daemon.wait_for_log(r'Received disconnect event')
365
366
def test_failing_plugins(directory):
    """lightningd must refuse to start with a broken or missing plugin."""
    cwd = os.getcwd()
    for name in ('failtimeout.py', 'doesnotexist.py'):
        plugin = os.path.join(cwd, 'contrib/plugins/fail', name)
        # Startup with such a plugin must exit non-zero.
        with pytest.raises(subprocess.CalledProcessError):
            subprocess.check_output([
                'lightningd/lightningd',
                '--lightning-dir={}'.format(directory),
                '--plugin={}'.format(plugin),
                '--help',
            ])
381
382
def test_pay_plugin(node_factory):
    """The built-in pay plugin pays an invoice and reports proper usage."""
    l1, l2 = node_factory.line_graph(2)
    inv = l2.rpc.invoice(123000, 'label', 'description', 3700)

    result = l1.rpc.pay(bolt11=inv['bolt11'])
    assert result['status'] == 'complete'

    # Calling pay without its mandatory bolt11 argument must be rejected.
    with pytest.raises(RpcError, match=r'missing required parameter'):
        l1.rpc.call('pay')

    # Make sure usage messages are present.
    expected = 'pay bolt11 [msatoshi] [label] [riskfactor] [maxfeepercent] '\
               '[retry_for] [maxdelay] [exemptfee] [localofferid]'
    if DEVELOPER:
        expected += ' [use_shadow]'
    assert only_one(l1.rpc.help('pay')['help'])['command'] == expected
399
400
def test_plugin_connected_hook_chaining(node_factory):
    """ l1 uses the logger_a, reject and logger_b plugin.

    l1 is configured to accept connections from l2, but not from l3.
    we check that logger_a is always called and logger_b only for l2.
    """
    # Plugin order matters: logger_a runs first in the peer_connected
    # chain, reject.py second, logger_b last.
    opts = [{'plugin':
             [os.path.join(os.getcwd(),
                           'tests/plugins/peer_connected_logger_a.py'),
              os.path.join(os.getcwd(),
                           'tests/plugins/reject.py'),
              os.path.join(os.getcwd(),
                           'tests/plugins/peer_connected_logger_b.py')],
             'allow_warning': True},
            {},
            {'allow_warning': True}]

    l1, l2, l3 = node_factory.get_nodes(3, opts=opts)
    l2id = l2.info['id']
    l3id = l3.info['id']
    # Put l3 on l1's reject list before any connection attempt.
    l1.rpc.reject(l3.info['id'])

    l2.connect(l1)
    # Allowed peer: both loggers fire, with the accept in between.
    l1.daemon.wait_for_logs([
        f"peer_connected_logger_a {l2id}",
        f"{l2id} is allowed",
        f"peer_connected_logger_b {l2id}"
    ])
    assert len(l1.rpc.listpeers(l2id)['peers']) == 1

    l3.connect(l1)
    # Rejected peer: chain stops at reject.py.
    l1.daemon.wait_for_logs([
        f"peer_connected_logger_a {l3id}",
        f"{l3id} is in reject list"
    ])

    # FIXME: this error occurs *after* connection, so we connect then drop.
    l3.daemon.wait_for_log(r"chan#1: peer_in WIRE_WARNING")
    l3.daemon.wait_for_log(r"You are in reject list")

    def check_disconnect():
        # True once l1 no longer lists l3 as a connected peer.
        peers = l1.rpc.listpeers(l3id)['peers']
        return peers == [] or not peers[0]['connected']

    wait_for(check_disconnect)
    # logger_b sits after reject.py, so it must never see l3.
    assert not l3.daemon.is_in_log(f"peer_connected_logger_b {l3id}")
447
448
def test_async_rpcmethod(node_factory, executor):
    """This tests the async rpcmethods.

    It works in conjunction with the `asynctest` plugin which stashes
    requests and then resolves all of them on the fifth call.
    """
    l1 = node_factory.get_node(options={'plugin': os.path.join(os.getcwd(), 'tests/plugins/asynctest.py')})

    # Fire off ten concurrent calls; the plugin stashes them all.
    results = [executor.submit(l1.rpc.asyncqueue) for _ in range(10)]

    time.sleep(3)

    # None of these should have returned yet
    assert not any(r.done() for r in results)

    # This last one triggers the release and all results should be 42,
    # since the last number is returned for all
    l1.rpc.asyncflush(42)

    assert [r.result() for r in results] == [42] * len(results)
471
472
@unittest.skipIf(os.getenv('TEST_DB_PROVIDER', 'sqlite3') != 'sqlite3', "Only sqlite3 implements the db_write_hook currently")
def test_db_hook(node_factory, executor):
    """This tests the db hook.

    The dblog.py plugin mirrors every db_write into its own sqlite3
    file; after shutdown the mirror must be identical to lightningd's
    own database.
    """
    dbfile = os.path.join(node_factory.directory, "dblog.sqlite3")
    l1 = node_factory.get_node(options={'plugin': os.path.join(os.getcwd(), 'tests/plugins/dblog.py'),
                                        'dblog-file': dbfile})

    # It should see the db being created, and sometime later actually get
    # initted.
    # This precedes startup, so needle already past
    assert l1.daemon.is_in_log(r'plugin-dblog.py: deferring \d+ commands')
    l1.daemon.logsearch_start = 0
    l1.daemon.wait_for_log('plugin-dblog.py: replaying pre-init data:')
    l1.daemon.wait_for_log('plugin-dblog.py: CREATE TABLE version \\(version INTEGER\\)')
    l1.daemon.wait_for_log("plugin-dblog.py: initialized.* 'startup': True")

    l1.stop()

    # Databases should be identical.  Close the handles afterwards so we
    # don't leak file descriptors into subsequent tests.
    db1 = sqlite3.connect(os.path.join(l1.daemon.lightning_dir, TEST_NETWORK, 'lightningd.sqlite3'))
    db2 = sqlite3.connect(dbfile)
    try:
        assert list(db1.iterdump()) == list(db2.iterdump())
    finally:
        db1.close()
        db2.close()
496
497
@unittest.skipIf(os.getenv('TEST_DB_PROVIDER', 'sqlite3') != 'sqlite3', "Only sqlite3 implements the db_write_hook currently")
def test_db_hook_multiple(node_factory, executor):
    """This tests the db hook for multiple-plugin case.

    Same as test_db_hook, but with a second (important) plugin also
    subscribed to db_write, to ensure chaining doesn't corrupt the
    mirrored database.
    """
    dbfile = os.path.join(node_factory.directory, "dblog.sqlite3")
    l1 = node_factory.get_node(options={'plugin': os.path.join(os.getcwd(), 'tests/plugins/dblog.py'),
                                        'important-plugin': os.path.join(os.getcwd(), 'tests/plugins/dbdummy.py'),
                                        'dblog-file': dbfile})

    # It should see the db being created, and sometime later actually get
    # initted.
    # This precedes startup, so needle already past
    assert l1.daemon.is_in_log(r'plugin-dblog.py: deferring \d+ commands')
    l1.daemon.logsearch_start = 0
    l1.daemon.wait_for_log('plugin-dblog.py: replaying pre-init data:')
    l1.daemon.wait_for_log('plugin-dblog.py: CREATE TABLE version \\(version INTEGER\\)')
    l1.daemon.wait_for_log("plugin-dblog.py: initialized.* 'startup': True")

    l1.stop()

    # Databases should be identical.  Close the handles afterwards so we
    # don't leak file descriptors into subsequent tests.
    db1 = sqlite3.connect(os.path.join(l1.daemon.lightning_dir, TEST_NETWORK, 'lightningd.sqlite3'))
    db2 = sqlite3.connect(dbfile)
    try:
        assert list(db1.iterdump()) == list(db2.iterdump())
    finally:
        db1.close()
        db2.close()
522
523
def test_utf8_passthrough(node_factory, executor):
    """UTF-8 strings must round-trip unescaped through RPC and the CLI."""
    l1 = node_factory.get_node(options={'plugin': os.path.join(os.getcwd(), 'tests/plugins/utf8.py'),
                                        'log-level': 'io'})

    # This works because Python unmangles.
    reply = l1.rpc.call('utf8', ['ナンセンス 1杯'])
    assert '\\u' not in reply['utf8']
    assert reply['utf8'] == 'ナンセンス 1杯'

    # Now, try native.
    cli_out = subprocess.check_output(['cli/lightning-cli',
                                       '--network={}'.format(TEST_NETWORK),
                                       '--lightning-dir={}'
                                       .format(l1.daemon.lightning_dir),
                                       'utf8', 'ナンセンス 1杯']).decode('utf-8')
    assert '\\u' not in cli_out
    assert cli_out == '{\n   "utf8": "ナンセンス 1杯"\n}\n'
541
542
def test_invoice_payment_hook(node_factory):
    """ l1 uses the reject-payment plugin to reject invoices with odd preimages.
    """
    opts = [{}, {'plugin': os.path.join(os.getcwd(), 'tests/plugins/reject_some_invoices.py')}]
    l1, l2 = node_factory.line_graph(2, opts=opts)

    # An even preimage sails through the hook.
    good_inv = l2.rpc.invoice(1230, 'label', 'description', preimage='1' * 64)
    l1.rpc.pay(good_inv['bolt11'])

    for needle in ('label=label', 'msat=', 'preimage=' + '1' * 64):
        l2.daemon.wait_for_log(needle)

    # An odd preimage gets rejected by the hook.
    bad_inv = l2.rpc.invoice(1230, 'label2', 'description', preimage='0' * 64)
    with pytest.raises(RpcError):
        l1.rpc.pay(bad_inv['bolt11'])

    # The rejection surfaces as a temporary node failure to the payer.
    pstatus = l1.rpc.call('paystatus', [bad_inv['bolt11']])['pay'][0]
    assert pstatus['attempts'][-1]['failure']['data']['failcodename'] == 'WIRE_TEMPORARY_NODE_FAILURE'

    for needle in ('label=label2', 'msat=', 'preimage=' + '0' * 64):
        l2.daemon.wait_for_log(needle)
568
569
def test_invoice_payment_hook_hold(node_factory):
    """ l1 uses the hold_invoice plugin to delay invoice payment.
    """
    opts = [{}, {'plugin': os.path.join(os.getcwd(), 'tests/plugins/hold_invoice.py'), 'holdtime': TIMEOUT / 2}]
    payer, holder = node_factory.line_graph(2, opts=opts)

    # Payment must still succeed even though the hook stalls it.
    invoice = holder.rpc.invoice(1230, 'label', 'description', preimage='1' * 64)
    payer.rpc.pay(invoice['bolt11'])
578
579
@pytest.mark.openchannel('v1')
@pytest.mark.openchannel('v2')
def test_openchannel_hook(node_factory, bitcoind):
    """ l2 uses the reject_odd_funding_amounts plugin to reject some openings.

    Also verifies that the plugin receives the full set of openchannel
    (or openchannel2, under dual-funding) hook payload fields.
    """
    opts = [{}, {'plugin': os.path.join(os.getcwd(), 'tests/plugins/reject_odd_funding_amounts.py')}]
    l1, l2 = node_factory.line_graph(2, fundchannel=False, opts=opts)
    l1.fundwallet(10**6)

    # Even amount: works.
    l1.rpc.fundchannel(l2.info['id'], 100000)

    # Make sure plugin got all the vars we expect
    expected = {
        'channel_flags': '1',
        'dust_limit_satoshis': '546000msat',
        'htlc_minimum_msat': '0msat',
        'id': l1.info['id'],
        'max_accepted_htlcs': '483',
        'max_htlc_value_in_flight_msat': '18446744073709551615msat',
        'to_self_delay': '5',
    }

    if l2.config('experimental-dual-fund'):
        # openchannel2 var checks
        expected.update({
            'channel_id': '.*',
            'commitment_feerate_per_kw': '7500',
            'funding_feerate_per_kw': '7500',
            'feerate_our_max': '150000',
            'feerate_our_min': '1875',
            'locktime': '.*',
            'their_funding': '100000000msat',
            'channel_max_msat': '16777215000msat',
        })
    else:
        # v1 openchannel-only fields
        expected.update({
            'channel_reserve_satoshis': '1000000msat',
            'feerate_per_kw': '7500',
            'funding_satoshis': '100000000msat',
            'push_msat': '0msat',
        })

    # The plugin logs the var count and then each key=value pair.
    l2.daemon.wait_for_log('reject_odd_funding_amounts.py: {} VARS'.format(len(expected)))
    for k, v in expected.items():
        assert l2.daemon.is_in_log('reject_odd_funding_amounts.py: {}={}'.format(k, v))

    # Close it.
    txid = l1.rpc.close(l2.info['id'])['txid']
    bitcoind.generate_block(1, txid)
    wait_for(lambda: [c['state'] for c in only_one(l1.rpc.listpeers(l2.info['id'])['peers'])['channels']] == ['ONCHAIN'])

    # Odd amount: fails
    l1.connect(l2)
    with pytest.raises(RpcError, match=r"I don't like odd amounts"):
        l1.rpc.fundchannel(l2.info['id'], 100001)
636
637
@pytest.mark.openchannel('v1')
@pytest.mark.openchannel('v2')
def test_openchannel_hook_error_handling(node_factory, bitcoind):
    """ l2 uses a plugin that should fatal() crash the node.

    This is because the plugin rejects a channel while
    also setting a close_to address which isn't allowed.
    """
    opts = {'plugin': os.path.join(os.getcwd(), 'tests/plugins/openchannel_hook_accepter.py')}
    # openchannel_reject_but_set_close_to.py')}
    l1 = node_factory.get_node()
    # l2 is expected to die fatally, so tolerate failure and broken logs.
    l2 = node_factory.get_node(options=opts, expect_fail=True,
                               may_fail=True, allow_broken_log=True)
    l1.connect(l2)
    l1.fundwallet(10**6)

    # next fundchannel should fail fatal() for l2
    bad_amount = 100004
    with pytest.raises(RpcError, match=r'Owning subdaemon (openingd|dualopend) died'):
        l1.rpc.fundchannel(l2.info['id'], bad_amount)
    assert l2.daemon.is_in_log("BROKEN.*Plugin rejected openchannel[2]? but also set close_to")
660
661
@pytest.mark.openchannel('v1')
@pytest.mark.openchannel('v2')
def test_openchannel_hook_chaining(node_factory, bitcoind):
    """ l2 uses a set of plugin that all use the openchannel_hook.

    We test that chaining works by using multiple plugins in a way
    that we check for the first plugin that rejects prevents from evaluating
    further plugin responses down the chain.

    """
    # Chain order: accept -> accepter -> reject.
    opts = [{}, {'plugin': [
        os.path.join(os.path.dirname(__file__), '..', 'tests/plugins/openchannel_hook_accept.py'),
        os.path.join(os.path.dirname(__file__), '..', 'tests/plugins/openchannel_hook_accepter.py'),
        os.path.join(os.path.dirname(__file__), '..', 'tests/plugins/openchannel_hook_reject.py')
    ]}]
    l1, l2 = node_factory.line_graph(2, fundchannel=False, opts=opts)
    l1.fundwallet(10**6)

    # Matches both 'openchannel' and 'openchannel2' hook log lines.
    hook_msg = "openchannel2? hook rejects and says '"
    # 100005sat fundchannel should fail fatal() for l2
    # because hook_accepter.py rejects on that amount 'for a reason'
    with pytest.raises(RpcError, match=r'reject for a reason'):
        l1.rpc.fundchannel(l2.info['id'], 100005)

    assert l2.daemon.wait_for_log(hook_msg + "reject for a reason")
    # first plugin in the chain was called
    assert l2.daemon.is_in_log("accept on principle")
    # the third plugin must now not be called anymore
    assert not l2.daemon.is_in_log("reject on principle")

    # 100000sat is good for hook_accepter, so it should fail 'on principle'
    # at third hook openchannel_reject.py
    with pytest.raises(RpcError, match=r'reject on principle'):
        l1.rpc.connect(l2.info['id'], 'localhost', l2.port)
        l1.rpc.fundchannel(l2.info['id'], 100000)
    assert l2.daemon.wait_for_log(hook_msg + "reject on principle")
698
699
700@pytest.mark.openchannel('v1')
701@pytest.mark.openchannel('v2')
702def test_channel_state_changed_bilateral(node_factory, bitcoind):
703    """ We open and close a channel and check notifications both sides.
704
705    The misc_notifications.py plugin logs `channel_state_changed` events.
706    """
707    opts = {"plugin": os.path.join(os.getcwd(), "tests/plugins/misc_notifications.py")}
708    l1, l2 = node_factory.line_graph(2, opts=opts)
709
710    l1_id = l1.rpc.getinfo()["id"]
711    l2_id = l2.rpc.getinfo()["id"]
712    cid = l1.get_channel_id(l2)
713    scid = l1.get_channel_scid(l2)
714
715    # a helper that gives us the next channel_state_changed log entry
716    def wait_for_event(node):
717        msg = node.daemon.wait_for_log("channel_state_changed.*new_state.*")
718        event = ast.literal_eval(re.findall(".*({.*}).*", msg)[0])
719        return event
720
721    # check channel 'opener' and 'closer' within this testcase ...
722    assert(l1.rpc.listpeers()['peers'][0]['channels'][0]['opener'] == 'local')
723    assert(l2.rpc.listpeers()['peers'][0]['channels'][0]['opener'] == 'remote')
724    # the 'closer' should be missing initially
725    assert 'closer' not in l1.rpc.listpeers()['peers'][0]['channels'][0]
726    assert 'closer' not in l2.rpc.listpeers()['peers'][0]['channels'][0]
727
728    event1 = wait_for_event(l1)
729    event2 = wait_for_event(l2)
730    if l1.config('experimental-dual-fund'):
731        # Dual funded channels have an extra state change
732        assert(event1['peer_id'] == l2_id)  # we only test these IDs the first time
733        assert(event1['channel_id'] == cid)
734        assert(event1['short_channel_id'] is None)
735        assert(event1['old_state'] == "DUALOPEND_OPEN_INIT")
736        assert(event1['new_state'] == "DUALOPEND_AWAITING_LOCKIN")
737        assert(event1['cause'] == "user")
738        assert(event1['message'] == "Sigs exchanged, waiting for lock-in")
739        event1 = wait_for_event(l1)
740        assert(event2['peer_id'] == l1_id)  # we only test these IDs the first time
741        assert(event2['channel_id'] == cid)
742        assert(event2['short_channel_id'] is None)
743        assert(event2['old_state'] == "DUALOPEND_OPEN_INIT")
744        assert(event2['new_state'] == "DUALOPEND_AWAITING_LOCKIN")
745        assert(event2['cause'] == "remote")
746        assert(event2['message'] == "Sigs exchanged, waiting for lock-in")
747        event2 = wait_for_event(l2)
748
749    assert(event1['peer_id'] == l2_id)  # we only test these IDs the first time
750    assert(event1['channel_id'] == cid)
751    assert(event1['short_channel_id'] == scid)
752    if l1.config('experimental-dual-fund'):
753        assert(event1['old_state'] == "DUALOPEND_AWAITING_LOCKIN")
754    else:
755        assert(event1['old_state'] == "CHANNELD_AWAITING_LOCKIN")
756    assert(event1['new_state'] == "CHANNELD_NORMAL")
757    assert(event1['cause'] == "user")
758    assert(event1['message'] == "Lockin complete")
759
760    assert(event2['peer_id'] == l1_id)
761    assert(event2['channel_id'] == cid)
762    assert(event2['short_channel_id'] == scid)
763    if l1.config('experimental-dual-fund'):
764        assert(event2['old_state'] == "DUALOPEND_AWAITING_LOCKIN")
765    else:
766        assert(event2['old_state'] == "CHANNELD_AWAITING_LOCKIN")
767    assert(event2['new_state'] == "CHANNELD_NORMAL")
768    assert(event2['cause'] == "remote")
769    assert(event2['message'] == "Lockin complete")
770
771    # also test the correctness of timestamps once
772    assert(datetime.strptime(event1['timestamp'], '%Y-%m-%dT%H:%M:%S.%fZ'))
773    assert(datetime.strptime(event2['timestamp'], '%Y-%m-%dT%H:%M:%S.%fZ'))
774
775    # close channel and look for stateful events
776    l1.rpc.close(scid)
777
778    event1 = wait_for_event(l1)
779    assert(event1['old_state'] == "CHANNELD_NORMAL")
780    assert(event1['new_state'] == "CHANNELD_SHUTTING_DOWN")
781    assert(event1['cause'] == "user")
782    assert(event1['message'] == "User or plugin invoked close command")
783    event2 = wait_for_event(l2)
784    assert(event2['old_state'] == "CHANNELD_NORMAL")
785    assert(event2['new_state'] == "CHANNELD_SHUTTING_DOWN")
786    assert(event2['cause'] == "remote")
787    assert(event2['message'] == "Peer closes channel")
788
789    # 'closer' should now be set accordingly ...
790    assert(l1.rpc.listpeers()['peers'][0]['channels'][0]['closer'] == 'local')
791    assert(l2.rpc.listpeers()['peers'][0]['channels'][0]['closer'] == 'remote')
792
793    event1 = wait_for_event(l1)
794    assert(event1['old_state'] == "CHANNELD_SHUTTING_DOWN")
795    assert(event1['new_state'] == "CLOSINGD_SIGEXCHANGE")
796    assert(event1['cause'] == "user")
797    assert(event1['message'] == "Start closingd")
798    event2 = wait_for_event(l2)
799    assert(event2['old_state'] == "CHANNELD_SHUTTING_DOWN")
800    assert(event2['new_state'] == "CLOSINGD_SIGEXCHANGE")
801    assert(event2['cause'] == "remote")
802    assert(event2['message'] == "Start closingd")
803
804    event1 = wait_for_event(l1)
805    assert(event1['old_state'] == "CLOSINGD_SIGEXCHANGE")
806    assert(event1['new_state'] == "CLOSINGD_COMPLETE")
807    assert(event1['cause'] == "user")
808    assert(event1['message'] == "Closing complete")
809    event2 = wait_for_event(l2)
810    assert(event2['old_state'] == "CLOSINGD_SIGEXCHANGE")
811    assert(event2['new_state'] == "CLOSINGD_COMPLETE")
812    assert(event2['cause'] == "remote")
813    assert(event2['message'] == "Closing complete")
814
815    bitcoind.generate_block(100, wait_for_mempool=1)  # so it gets settled
816
817    event1 = wait_for_event(l1)
818    assert(event1['old_state'] == "CLOSINGD_COMPLETE")
819    assert(event1['new_state'] == "FUNDING_SPEND_SEEN")
820    assert(event1['cause'] == "user")
821    assert(event1['message'] == "Onchain funding spend")
822    event2 = wait_for_event(l2)
823    assert(event2['old_state'] == "CLOSINGD_COMPLETE")
824    assert(event2['new_state'] == "FUNDING_SPEND_SEEN")
825    assert(event2['cause'] == "remote")
826    assert(event2['message'] == "Onchain funding spend")
827
828    event1 = wait_for_event(l1)
829    assert(event1['old_state'] == "FUNDING_SPEND_SEEN")
830    assert(event1['new_state'] == "ONCHAIN")
831    assert(event1['cause'] == "user")
832    assert(event1['message'] == "Onchain init reply")
833    event2 = wait_for_event(l2)
834    assert(event2['old_state'] == "FUNDING_SPEND_SEEN")
835    assert(event2['new_state'] == "ONCHAIN")
836    assert(event2['cause'] == "remote")
837    assert(event2['message'] == "Onchain init reply")
838
839
@pytest.mark.openchannel('v1')
@pytest.mark.openchannel('v2')
def test_channel_state_changed_unilateral(node_factory, bitcoind):
    """ We open, disconnect, force-close a channel and check for notifications.

    The misc_notifications.py plugin logs `channel_state_changed` events.
    """
    # FIXME: We can get warnings from unilateral closes, since we treat
    # such errors as soft because of LND.
    opts = {"plugin": os.path.join(os.getcwd(), "tests/plugins/misc_notifications.py"),
            "allow_warning": True}
    if EXPERIMENTAL_DUAL_FUND:
        # The reconnect below (after l1.restart) only happens for v2 opens.
        opts['may_reconnect'] = True

    l1, l2 = node_factory.line_graph(2, opts=opts)

    l1_id = l1.rpc.getinfo()["id"]
    cid = l1.get_channel_id(l2)
    scid = l1.get_channel_scid(l2)

    # a helper that gives us the next channel_state_changed log entry
    def wait_for_event(node):
        msg = node.daemon.wait_for_log("channel_state_changed.*new_state.*")
        # The plugin prints the event dict verbatim; parse it back out.
        event = ast.literal_eval(re.findall(".*({.*}).*", msg)[0])
        return event

    event2 = wait_for_event(l2)
    if l2.config('experimental-dual-fund'):
        # Dual funded channels emit one extra state change before lock-in.
        assert(event2['peer_id'] == l1_id)
        assert(event2['channel_id'] == cid)
        assert(event2['short_channel_id'] is None)
        assert(event2['old_state'] == "DUALOPEND_OPEN_INIT")
        assert(event2['new_state'] == "DUALOPEND_AWAITING_LOCKIN")
        assert(event2['cause'] == "remote")
        assert(event2['message'] == "Sigs exchanged, waiting for lock-in")
        event2 = wait_for_event(l2)

    assert(event2['peer_id'] == l1_id)  # we only test these IDs the first time
    assert(event2['channel_id'] == cid)
    assert(event2['short_channel_id'] == scid)
    if l2.config('experimental-dual-fund'):
        assert(event2['old_state'] == "DUALOPEND_AWAITING_LOCKIN")
    else:
        assert(event2['old_state'] == "CHANNELD_AWAITING_LOCKIN")
    assert(event2['new_state'] == "CHANNELD_NORMAL")
    assert(event2['cause'] == "remote")
    assert(event2['message'] == "Lockin complete")

    # close channel unilaterally and look for stateful events
    l1.rpc.stop()
    wait_for(lambda: not only_one(l2.rpc.listpeers()['peers'])['connected'])
    l2.rpc.close(scid, 1)  # force close after 1sec timeout

    event2 = wait_for_event(l2)
    assert(event2['old_state'] == "CHANNELD_NORMAL")
    assert(event2['new_state'] == "CHANNELD_SHUTTING_DOWN")
    assert(event2['cause'] == "user")
    assert(event2['message'] == "User or plugin invoked close command")
    event2 = wait_for_event(l2)
    assert(event2['old_state'] == "CHANNELD_SHUTTING_DOWN")
    assert(event2['new_state'] == "AWAITING_UNILATERAL")
    assert(event2['cause'] == "user")
    assert(event2['message'] == "Forcibly closed by `close` command timeout")

    # restart l1 early, as the test gets flaky when done after generate_block(100)
    l1.restart()
    wait_for(lambda: len(l1.rpc.listpeers()['peers']) == 1)
    # check 'closer' on l2 while the peer is not yet forgotten
    assert(l2.rpc.listpeers()['peers'][0]['channels'][0]['closer'] == 'local')
    if EXPERIMENTAL_DUAL_FUND:
        l1.daemon.wait_for_log(r'Peer has reconnected, state')
        l2.daemon.wait_for_log(r'Peer has reconnected, state')

    # settle the channel closure
    bitcoind.generate_block(100)

    event2 = wait_for_event(l2)
    assert(event2['old_state'] == "AWAITING_UNILATERAL")
    assert(event2['new_state'] == "FUNDING_SPEND_SEEN")
    assert(event2['cause'] == "user")
    assert(event2['message'] == "Onchain funding spend")
    event2 = wait_for_event(l2)
    assert(event2['old_state'] == "FUNDING_SPEND_SEEN")
    assert(event2['new_state'] == "ONCHAIN")
    assert(event2['cause'] == "user")
    assert(event2['message'] == "Onchain init reply")

    # Check 'closer' on l1 while the peer is not yet forgotten
    event1 = wait_for_event(l1)
    assert(l1.rpc.listpeers()['peers'][0]['channels'][0]['closer'] == 'remote')

    # check if l1 sees ONCHAIN reasons for his channel
    assert(event1['old_state'] == "CHANNELD_NORMAL")
    assert(event1['new_state'] == "AWAITING_UNILATERAL")
    assert(event1['cause'] == "onchain")
    assert(event1['message'] == "Funding transaction spent")
    event1 = wait_for_event(l1)
    assert(event1['old_state'] == "AWAITING_UNILATERAL")
    assert(event1['new_state'] == "FUNDING_SPEND_SEEN")
    assert(event1['cause'] == "onchain")
    assert(event1['message'] == "Onchain funding spend")
    event1 = wait_for_event(l1)
    assert(event1['old_state'] == "FUNDING_SPEND_SEEN")
    assert(event1['new_state'] == "ONCHAIN")
    assert(event1['cause'] == "onchain")
    assert(event1['message'] == "Onchain init reply")
946
947
@pytest.mark.openchannel('v1')
@pytest.mark.openchannel('v2')
def test_channel_state_change_history(node_factory, bitcoind):
    """ We open and close a channel and check for state_changes entries.

    """
    l1, l2 = node_factory.line_graph(2)
    scid = l1.get_channel_scid(l2)

    l1.rpc.close(scid)
    bitcoind.generate_block(100)  # so it gets settled

    history = l1.rpc.listpeers()['peers'][0]['channels'][0]['state_changes']
    if l1.config('experimental-dual-fund'):
        # v2 (dual-funded) opens record an extra DUALOPEND_* transition
        # at the start, shifting all subsequent indices by one.
        assert(history[0]['cause'] == "user")
        assert(history[0]['old_state'] == "DUALOPEND_OPEN_INIT")
        assert(history[0]['new_state'] == "DUALOPEND_AWAITING_LOCKIN")
        assert(history[1]['cause'] == "user")
        assert(history[1]['old_state'] == "DUALOPEND_AWAITING_LOCKIN")
        assert(history[1]['new_state'] == "CHANNELD_NORMAL")
        assert(history[2]['cause'] == "user")
        assert(history[2]['new_state'] == "CHANNELD_SHUTTING_DOWN")
        assert(history[3]['cause'] == "user")
        assert(history[3]['new_state'] == "CLOSINGD_SIGEXCHANGE")
        assert(history[4]['cause'] == "user")
        assert(history[4]['new_state'] == "CLOSINGD_COMPLETE")
        assert(history[4]['message'] == "Closing complete")
    else:
        assert(history[0]['cause'] == "user")
        assert(history[0]['old_state'] == "CHANNELD_AWAITING_LOCKIN")
        assert(history[0]['new_state'] == "CHANNELD_NORMAL")
        assert(history[1]['cause'] == "user")
        assert(history[1]['new_state'] == "CHANNELD_SHUTTING_DOWN")
        assert(history[2]['cause'] == "user")
        assert(history[2]['new_state'] == "CLOSINGD_SIGEXCHANGE")
        assert(history[3]['cause'] == "user")
        assert(history[3]['new_state'] == "CLOSINGD_COMPLETE")
        assert(history[3]['message'] == "Closing complete")
987
988
@pytest.mark.developer("without DEVELOPER=1, gossip v slow")
def test_htlc_accepted_hook_fail(node_factory):
    """Send payments from l1 to l2, but l2 just declines everything.

    l2 is configured with a plugin that'll hook into htlc_accepted and
    always return failures. The same should also work for forwarded
    htlcs in the second half.

    """
    l1, l2, l3 = node_factory.line_graph(3, opts=[
        {},
        {'plugin': os.path.join(os.getcwd(), 'tests/plugins/fail_htlcs.py')},
        {}
    ], wait_for_announce=True)

    # Direct payment l1 -> l2 must be rejected by the hook.
    invoice = l2.rpc.invoice(1000, "lbl", "desc")
    payment_hash = invoice['payment_hash']
    route = l1.rpc.getroute(l2.info['id'], 1000, 1)['route']

    # Avoid `pay` here: l2 answers with WIRE_TEMPORARY_NODE_FAILURE, which
    # would get it excluded on the next pay attempt, and routing to an
    # excluded destination is undefined.
    l1.rpc.sendpay(route, payment_hash, payment_secret=invoice['payment_secret'])
    with pytest.raises(RpcError) as excinfo:
        l1.rpc.waitsendpay(payment_hash)
    error_data = excinfo.value.error['data']
    assert error_data['failcode'] == 0x2002
    assert error_data['erring_index'] == 1

    # The failed attempt must leave the invoice unpaid.
    invoices = l2.rpc.listinvoices("lbl")['invoices']
    assert len(invoices) == 1
    assert invoices[0]['status'] == 'unpaid'

    # Forwarded HTLCs through l2 must fail just the same.
    bolt11 = l3.rpc.invoice(1000, "lbl", "desc")['bolt11']
    with pytest.raises(RpcError):
        l1.rpc.pay(bolt11)

    invoices = l3.rpc.listinvoices("lbl")['invoices']
    assert len(invoices) == 1
    assert invoices[0]['status'] == 'unpaid'
1031
1032
@pytest.mark.developer("without DEVELOPER=1, gossip v slow")
def test_htlc_accepted_hook_resolve(node_factory):
    """l3 creates an invoice, l2 knows the preimage and will shortcircuit.
    """
    l1, l2, l3 = node_factory.line_graph(3, opts=[
        {},
        {'plugin': os.path.join(os.getcwd(), 'tests/plugins/shortcircuit.py')},
        {}
    ], wait_for_announce=True)

    # Fixed preimage so the shortcircuit plugin on l2 can resolve the HTLC.
    bolt11 = l3.rpc.invoice(msatoshi=1000, label="lbl", description="desc", preimage="00" * 32)['bolt11']
    l1.rpc.pay(bolt11)

    # l2 resolved the HTLC itself, so l3 never saw it: invoice stays unpaid.
    invoices = l3.rpc.listinvoices("lbl")['invoices']
    assert len(invoices) == 1
    assert invoices[0]['status'] == 'unpaid'
1049
1050
def test_htlc_accepted_hook_direct_restart(node_factory, executor):
    """l2 restarts while it is pondering what to do with an HTLC.

    The hold_htlcs.py plugin delays its htlc_accepted response; the held
    HTLC must survive the restart of l2 and the payment still complete.
    """
    l1, l2 = node_factory.line_graph(2, opts=[
        {'may_reconnect': True},
        {'may_reconnect': True,
         'plugin': os.path.join(os.getcwd(), 'tests/plugins/hold_htlcs.py')}
    ])

    i1 = l2.rpc.invoice(msatoshi=1000, label="direct", description="desc")['bolt11']
    f1 = executor.submit(l1.rpc.pay, i1)

    l2.daemon.wait_for_log(r'Holding onto an incoming htlc for 10 seconds')

    # Check that the status mentions the HTLC being held
    # (dropped a redundant discarded l2.rpc.listpeers() call here).
    peers = l2.rpc.listpeers()['peers']
    htlc_status = peers[0]['channels'][0]['htlcs'][0].get('status', None)
    assert htlc_status == "Waiting for the htlc_accepted hook of plugin hold_htlcs.py"

    needle = l2.daemon.logsearch_start
    l2.restart()

    # Now it should try again, *after* initializing.
    # This may be before "Server started with public key" swallowed by restart()
    l2.daemon.logsearch_start = needle + 1
    l2.daemon.wait_for_log(r'hold_htlcs.py initializing')
    l2.daemon.wait_for_log(r'Holding onto an incoming htlc for 10 seconds')
    f1.result()
1080
1081
@pytest.mark.developer("without DEVELOPER=1, gossip v slow")
def test_htlc_accepted_hook_forward_restart(node_factory, executor):
    """l2 restarts while it is pondering what to do with an HTLC.

    The held (forwarded) HTLC must survive the restart; the plugin also
    dumps the onion it saw to a JSON file which we inspect afterwards.
    """
    l1, l2, l3 = node_factory.line_graph(3, opts=[
        {'may_reconnect': True},
        {'may_reconnect': True,
         'plugin': os.path.join(os.getcwd(), 'tests/plugins/hold_htlcs.py')},
        {'may_reconnect': True},
    ], wait_for_announce=True)

    i1 = l3.rpc.invoice(msatoshi=1000, label="direct", description="desc")['bolt11']
    f1 = executor.submit(l1.rpc.dev_pay, i1, use_shadow=False)

    l2.daemon.wait_for_log(r'Holding onto an incoming htlc for 10 seconds')

    needle = l2.daemon.logsearch_start
    l2.restart()

    # Now it should try again, *after* initializing.
    # This may be before "Server started with public key" swallowed by restart()
    l2.daemon.logsearch_start = needle + 1
    l2.daemon.wait_for_log(r'hold_htlcs.py initializing')
    l2.daemon.wait_for_log(r'Holding onto an incoming htlc for 10 seconds')

    # Grab the file where the plugin wrote the onion and read it in for some
    # additional checks
    logline = l2.daemon.wait_for_log(r'Onion written to')
    fname = re.search(r'Onion written to (.*\.json)', logline).group(1)
    # Was `json.load(open(fname))`, which leaked the file handle.
    with open(fname) as f:
        onion = json.load(f)
    assert onion['type'] == 'tlv'
    assert re.match(r'^11020203e80401..0608................$', onion['payload'])
    assert len(onion['shared_secret']) == 64
    assert onion['forward_amount'] == '1000msat'
    assert len(onion['next_onion']) == 2 * (1300 + 32 + 33 + 1)

    f1.result()
1119
1120
def test_warning_notification(node_factory):
    """ test 'warning' notifications

    The pretend_badlog.py plugin both triggers log entries at a chosen
    level (via its 'pretendbad' method) and subscribes to the resulting
    'warning' notifications, re-logging their fields for us to check.
    """
    l1 = node_factory.get_node(options={'plugin': os.path.join(os.getcwd(), 'tests/plugins/pretend_badlog.py')}, allow_broken_log=True)

    # 1. test 'warn' level
    event = "Test warning notification(for unusual event)"
    l1.rpc.call('pretendbad', {'event': event, 'level': 'warn'})

    # ensure an unusual log_entry was produced by 'pretendunusual' method
    l1.daemon.wait_for_log('plugin-pretend_badlog.py: Test warning notification\\(for unusual event\\)')

    # now wait for notification
    l1.daemon.wait_for_log('plugin-pretend_badlog.py: Received warning')
    l1.daemon.wait_for_log('plugin-pretend_badlog.py: level: warn')
    l1.daemon.wait_for_log('plugin-pretend_badlog.py: time: *')
    l1.daemon.wait_for_log('plugin-pretend_badlog.py: source: plugin-pretend_badlog.py')
    l1.daemon.wait_for_log('plugin-pretend_badlog.py: log: Test warning notification\\(for unusual event\\)')

    # 2. test 'error' level, steps like above
    event = "Test warning notification(for broken event)"
    l1.rpc.call('pretendbad', {'event': event, 'level': 'error'})
    l1.daemon.wait_for_log(r'\*\*BROKEN\*\* plugin-pretend_badlog.py: Test warning notification\(for broken event\)')

    l1.daemon.wait_for_log('plugin-pretend_badlog.py: Received warning')
    l1.daemon.wait_for_log('plugin-pretend_badlog.py: level: error')
    l1.daemon.wait_for_log('plugin-pretend_badlog.py: time: *')
    l1.daemon.wait_for_log('plugin-pretend_badlog.py: source: plugin-pretend_badlog.py')
    l1.daemon.wait_for_log('plugin-pretend_badlog.py: log: Test warning notification\\(for broken event\\)')
1150
1151
@pytest.mark.developer("needs to deactivate shadow routing")
def test_invoice_payment_notification(node_factory):
    """
    Test the 'invoice_payment' notification
    """
    opts = [{}, {"plugin": os.path.join(os.getcwd(), "contrib/plugins/helloworld.py")}]
    l1, l2 = node_factory.line_graph(2, opts=opts)

    amount_msat = 12345
    preimage = '1' * 64
    label = "a_descriptive_label"
    invoice = l2.rpc.invoice(amount_msat, label, 'description', preimage=preimage)
    # Shadow routing disabled so the paid amount matches the invoice exactly.
    l1.rpc.dev_pay(invoice['bolt11'], use_shadow=False)

    l2.daemon.wait_for_log(r"Received invoice_payment event for label {},"
                           " preimage {}, and amount of {}msat"
                           .format(label, preimage, amount_msat))
1169
1170
@pytest.mark.developer("needs to deactivate shadow routing")
def test_invoice_creation_notification(node_factory):
    """
    Test the 'invoice_creation' notification
    """
    opts = [{}, {"plugin": os.path.join(os.getcwd(), "contrib/plugins/helloworld.py")}]
    l1, l2 = node_factory.line_graph(2, opts=opts)

    amount_msat = 12345
    preimage = '1' * 64
    label = "a_descriptive_label"
    # Creating the invoice alone must fire the notification; no payment needed.
    l2.rpc.invoice(amount_msat, label, 'description', preimage=preimage)

    l2.daemon.wait_for_log(r"Received invoice_creation event for label {},"
                           " preimage {}, and amount of {}msat"
                           .format(label, preimage, amount_msat))
1187
1188
def test_channel_opened_notification(node_factory):
    """
    Test the 'channel_opened' notification sent at channel funding success.
    """
    opts = [{}, {"plugin": os.path.join(os.getcwd(), "tests/plugins/misc_notifications.py")}]
    funding_sats = 10**6
    l1, l2 = node_factory.line_graph(2, fundchannel=True, fundamount=funding_sats,
                                     opts=opts)

    # The notification may already have fired during setup, so rewind the
    # log search to the very beginning before looking for it.
    l2.daemon.logsearch_start = 0
    l2.daemon.wait_for_log(r"A channel was opened to us by {}, "
                           "with an amount of {}*"
                           .format(l1.info["id"], funding_sats))
1203
1204
@pytest.mark.developer("needs DEVELOPER=1")
def test_forward_event_notification(node_factory, bitcoind, executor):
    """ test 'forward_event' notifications

    l2 is the forwarding node; we drive one forward through each of the
    three outcomes (settled, failed, local_failed) and compare what the
    forward_payment_status.py plugin recorded against listforwards.
    """
    amount = 10**8
    disconnects = ['-WIRE_UPDATE_FAIL_HTLC', 'permfail']
    plugin = os.path.join(
        os.path.dirname(__file__),
        'plugins',
        'forward_payment_status.py'
    )
    l1, l2, l3, l4, l5 = node_factory.get_nodes(5, opts=[
        {},
        {'plugin': plugin},
        {},
        {},
        {'disconnect': disconnects}])

    # Star topology around l2 so every payment l1 makes is a forward by l2.
    l1.openchannel(l2, confirm=False, wait_for_announce=False)
    l2.openchannel(l3, confirm=False, wait_for_announce=False)
    l2.openchannel(l4, confirm=False, wait_for_announce=False)
    l2.openchannel(l5, confirm=False, wait_for_announce=False)

    # Generate 5, then make sure everyone is up to date before
    # last one, otherwise they might think it's in the future!
    bitcoind.generate_block(5)
    sync_blockheight(bitcoind, [l1, l2, l3, l4, l5])
    bitcoind.generate_block(1)

    # 4 channels, announced in both directions.
    wait_for(lambda: len(l1.rpc.listchannels()['channels']) == 8)

    inv = l3.rpc.invoice(amount, "first", "desc")
    payment_hash13 = inv['payment_hash']
    route = l1.rpc.getroute(l3.info['id'], amount, 1)['route']

    # status: offered -> settled
    l1.rpc.sendpay(route, payment_hash13, payment_secret=inv['payment_secret'])
    l1.rpc.waitsendpay(payment_hash13)

    # status: offered -> failed (unknown payment hash at l4)
    route = l1.rpc.getroute(l4.info['id'], amount, 1)['route']
    payment_hash14 = "f" * 64
    with pytest.raises(RpcError):
        l1.rpc.sendpay(route, payment_hash14, payment_secret="f" * 64)
        l1.rpc.waitsendpay(payment_hash14)

    # status: offered -> local_failed (l5 permfails, HTLC times out onchain)
    inv = l5.rpc.invoice(amount, 'onchain_timeout', 'desc')
    payment_hash15 = inv['payment_hash']
    fee = amount * 10 // 1000000 + 1
    c12 = l1.get_channel_scid(l2)
    c25 = l2.get_channel_scid(l5)
    # Hand-built route (amounts off by one) so getroute can't interfere.
    route = [{'msatoshi': amount + fee - 1,
              'id': l2.info['id'],
              'delay': 12,
              'channel': c12},
             {'msatoshi': amount - 1,
              'id': l5.info['id'],
              'delay': 5,
              'channel': c25}]

    executor.submit(l1.rpc.sendpay, route, payment_hash15, payment_secret=inv['payment_secret'])

    l5.daemon.wait_for_log('permfail')
    l5.wait_for_channel_onchain(l2.info['id'])
    l2.bitcoin.generate_block(1)
    l2.daemon.wait_for_log(' to ONCHAIN')
    l5.daemon.wait_for_log(' to ONCHAIN')

    l2.daemon.wait_for_log('Propose handling THEIR_UNILATERAL/OUR_HTLC by OUR_HTLC_TIMEOUT_TO_US .* after 6 blocks')
    bitcoind.generate_block(6)

    l2.wait_for_onchaind_broadcast('OUR_HTLC_TIMEOUT_TO_US',
                                   'THEIR_UNILATERAL/OUR_HTLC')

    bitcoind.generate_block(1)
    l2.daemon.wait_for_log('Resolved THEIR_UNILATERAL/OUR_HTLC by our proposal OUR_HTLC_TIMEOUT_TO_US')
    l5.daemon.wait_for_log('Ignoring output.*: OUR_UNILATERAL/THEIR_HTLC')

    # Bury everything so the close fully settles.
    bitcoind.generate_block(100)
    sync_blockheight(bitcoind, [l2])

    stats = l2.rpc.listforwards()['forwards']
    assert len(stats) == 3
    # The plugin saw two events per forward: 'offered' plus the outcome.
    plugin_stats = l2.rpc.call('listforwards_plugin')['forwards']
    assert len(plugin_stats) == 6

    # use stats to build what we expect went to plugin.
    expect = stats[0].copy()
    # First event won't have conclusion.
    del expect['resolved_time']
    expect['status'] = 'offered'
    assert plugin_stats[0] == expect
    expect = stats[0].copy()
    assert plugin_stats[1] == expect

    expect = stats[1].copy()
    del expect['resolved_time']
    expect['status'] = 'offered'
    assert plugin_stats[2] == expect
    expect = stats[1].copy()
    assert plugin_stats[3] == expect

    expect = stats[2].copy()
    del expect['failcode']
    del expect['failreason']
    expect['status'] = 'offered'
    assert plugin_stats[4] == expect
    expect = stats[2].copy()
    assert plugin_stats[5] == expect
1315
1316
def test_sendpay_notifications(node_factory, bitcoind):
    """ test 'sendpay_success' and 'sendpay_failure' notifications
    """
    amount = 10**8
    opts = [{'plugin': os.path.join(os.getcwd(), 'tests/plugins/sendpay_notifications.py')},
            {},
            {'may_reconnect': False}]
    l1, l2, l3 = node_factory.line_graph(3, opts=opts, wait_for_announce=True)
    chanid23 = l2.get_channel_scid(l3)

    # Two invoices: the first succeeds; the second fails because we close
    # the l2 -> l3 channel out from under it.
    inv1 = l3.rpc.invoice(amount, "first", "desc")
    hash1 = inv1['payment_hash']
    inv2 = l3.rpc.invoice(amount, "second", "desc")
    hash2 = inv2['payment_hash']
    route = l1.rpc.getroute(l3.info['id'], amount, 1)['route']

    l1.rpc.sendpay(route, hash1, payment_secret=inv1['payment_secret'])
    response1 = l1.rpc.waitsendpay(hash1)

    # Kill the last hop so the second payment cannot be forwarded.
    l2.rpc.close(chanid23, 1)

    l1.rpc.sendpay(route, hash2, payment_secret=inv2['payment_secret'])
    with pytest.raises(RpcError) as err:
        l1.rpc.waitsendpay(hash2)

    # The plugin must have recorded exactly one of each notification,
    # matching what the waiter saw.
    results = l1.rpc.call('listsendpays_plugin')
    assert len(results['sendpay_success']) == 1
    assert len(results['sendpay_failure']) == 1
    assert results['sendpay_success'][0] == response1
    assert results['sendpay_failure'][0] == err.value.error
1348
1349
def test_sendpay_notifications_nowaiter(node_factory):
    """Notifications must fire even when nobody calls waitsendpay."""
    opts = [{'plugin': os.path.join(os.getcwd(), 'tests/plugins/sendpay_notifications.py')},
            {},
            {'may_reconnect': False}]
    l1, l2, l3 = node_factory.line_graph(3, opts=opts, wait_for_announce=True)
    chanid23 = l2.get_channel_scid(l3)
    amount = 10**8

    inv1 = l3.rpc.invoice(amount, "first", "desc")
    hash1 = inv1['payment_hash']
    inv2 = l3.rpc.invoice(amount, "second", "desc")
    hash2 = inv2['payment_hash']
    route = l1.rpc.getroute(l3.info['id'], amount, 1)['route']

    # First payment succeeds; confirm via the plugin's log, not waitsendpay.
    l1.rpc.sendpay(route, hash1, payment_secret=inv1['payment_secret'])
    l1.daemon.wait_for_log(r'Received a sendpay_success')

    # Remove the last hop so the second payment fails.
    l2.rpc.close(chanid23, 1)

    l1.rpc.sendpay(route, hash2, payment_secret=inv2['payment_secret'])
    l1.daemon.wait_for_log(r'Received a sendpay_failure')

    results = l1.rpc.call('listsendpays_plugin')
    assert len(results['sendpay_success']) == 1
    assert len(results['sendpay_failure']) == 1
1375
1376
def test_rpc_command_hook(node_factory):
    """Test the `rpc_command` hook chain

    Two plugins register the hook; we check restriction, replacement
    (first-wins), custom responses, redirection, and chained removal.
    """
    plugin = [
        os.path.join(os.getcwd(), "tests/plugins/rpc_command_1.py"),
        os.path.join(os.getcwd(), "tests/plugins/rpc_command_2.py")
    ]
    l1 = node_factory.get_node(options={"plugin": plugin})

    # rpc_command_2 plugin restricts using "sendpay"
    with pytest.raises(RpcError, match=r"rpc_command_2 cannot do this"):
        l1.rpc.call("sendpay")

    # Both plugins will replace calls made for the "invoice" command
    # The first will win, for the second a warning should be logged
    invoice = l1.rpc.invoice(10**6, "test_side", "test_input")
    decoded = l1.rpc.decodepay(invoice["bolt11"])
    assert decoded["description"] == "rpc_command_1 modified this description"
    l1.daemon.wait_for_log("rpc_command hook 'invoice' already modified, ignoring.")

    # Disable schema checking here!
    # (The custom listfunds response below doesn't match the real schema.)
    schemas = l1.rpc.jsonschemas
    l1.rpc.jsonschemas = {}
    # rpc_command_1 plugin sends a custom response to "listfunds"
    funds = l1.rpc.listfunds()
    assert funds[0] == "Custom rpc_command_1 result"

    # Test command redirection to a plugin
    l1.rpc.call('help', [0])

    # Check the 'already modified' warning is not logged on just 'continue'
    assert not l1.daemon.is_in_log("rpc_command hook 'listfunds' already modified, ignoring.")

    # Tests removing a chained hook in random order.
    # Note: This will get flaky by design if there's a problem.
    if bool(random.getrandbits(1)):
        l1.rpc.plugin_stop('rpc_command_2.py')
        l1.rpc.plugin_stop('rpc_command_1.py')
    else:
        l1.rpc.plugin_stop('rpc_command_1.py')
        l1.rpc.plugin_stop('rpc_command_2.py')

    # Restore schema checking for subsequent RPC calls.
    l1.rpc.jsonschemas = schemas
1419
1420
def test_libplugin(node_factory):
    """Sanity checks for plugins made with libplugin

    Covers startup, dynamic start/stop, commands, options (including a
    deprecated one that must be rejected), hooks and notifications.
    """
    plugin = os.path.join(os.getcwd(), "tests/plugins/test_libplugin")
    l1 = node_factory.get_node(options={"plugin": plugin,
                                        'allow-deprecated-apis': False})

    # Test startup
    assert l1.daemon.is_in_log("test_libplugin initialised!")
    # Test dynamic startup
    l1.rpc.plugin_stop(plugin)
    l1.rpc.plugin_start(plugin)
    l1.rpc.check("helloworld")

    # Test commands
    assert l1.rpc.call("helloworld") == {"hello": "world"}
    assert l1.rpc.call("helloworld", {"name": "test"}) == {"hello": "test"}
    # Restart with the 'name' option set on the command line.
    l1.stop()
    l1.daemon.opts["plugin"] = plugin
    l1.daemon.opts["name"] = "test_opt"
    l1.start()
    assert l1.rpc.call("helloworld") == {"hello": "test_opt"}
    # But param takes over!
    assert l1.rpc.call("helloworld", {"name": "test"}) == {"hello": "test"}

    # Test hooks and notifications
    l2 = node_factory.get_node()
    l2.connect(l1)
    l1.daemon.wait_for_log("{} peer_connected".format(l2.info["id"]))
    l1.daemon.wait_for_log("{} connected".format(l2.info["id"]))

    # Test RPC calls FIXME: test concurrent ones ?
    assert l1.rpc.call("testrpc") == l1.rpc.getinfo()

    # Make sure deprecated options nor commands are mentioned.
    with pytest.raises(RpcError, match=r'Command "testrpc-deprecated" is deprecated'):
        l1.rpc.call('testrpc-deprecated')

    assert not any([h['command'] == 'testrpc-deprecated'
                    for h in l1.rpc.help()['help']])
    with pytest.raises(RpcError, match=r"Deprecated command.*testrpc-deprecated"):
        l1.rpc.help('testrpc-deprecated')

    assert 'name-deprecated' not in str(l1.rpc.listconfigs())

    # With deprecated APIs disallowed, the deprecated option must be refused.
    l1.stop()
    l1.daemon.opts["name-deprecated"] = "test_opt"

    # This actually dies while waiting for the logs.
    with pytest.raises(ValueError):
        l1.start()

    del l1.daemon.opts["name-deprecated"]
    l1.start()
1474
1475
def test_libplugin_deprecated(node_factory):
    """Sanity checks for plugins made with libplugin using deprecated args"""
    plugin_path = os.path.join(os.getcwd(), "tests/plugins/test_libplugin")
    l1 = node_factory.get_node(options={"plugin": plugin_path,
                                        'name-deprecated': 'test_opt depr',
                                        'allow-deprecated-apis': True})

    # With deprecated APIs allowed, both the deprecated option and the
    # deprecated command must keep working.
    assert l1.rpc.call("helloworld") == {"hello": "test_opt depr"}
    l1.rpc.help('testrpc-deprecated')
    assert l1.rpc.call("testrpc-deprecated") == l1.rpc.getinfo()
1486
1487
@unittest.skipIf(
    not DEVELOPER or DEPRECATED_APIS, "needs LIGHTNINGD_DEV_LOG_IO and new API"
)
@pytest.mark.openchannel('v1')
@pytest.mark.openchannel('v2')
def test_plugin_feature_announce(node_factory):
    """Plugin-registered featurebits must appear in the right places.

    The feature-test.py plugin on l1 stashes one featurebit per location:
    201 for `init`, 203 for `node_announcement` and 205 for bolt11
    invoices. l2 is just the channel counterparty.
    """
    plugin = os.path.join(os.path.dirname(__file__), 'plugins/feature-test.py')
    l1, l2 = node_factory.line_graph(
        2, opts=[{'plugin': plugin, 'log-level': 'io'}, {}],
        wait_for_announce=True
    )

    # Dual-fund nodes additionally advertise option-anchor-outputs (21)
    # and option-dual-fund (29).
    extra = [21, 29] if l1.config('experimental-dual-fund') else []

    # Featurebit 201 must show up in the `init` message l1 sent.
    assert l1.daemon.is_in_log(r'\[OUT\] 001000022200....{}'
                               .format(expected_peer_features(extra=[201] + extra)))

    # Featurebit 205 must be set in a freshly generated invoice.
    decoded = Invoice.decode(l1.rpc.invoice(123, 'lbl', 'desc')['bolt11'])
    assert(decoded.featurebits.int & (1 << 205) != 0)

    # Featurebit 203 must be present in the `node_announcement`.
    ann = l1.rpc.listnodes(l1.info['id'])['nodes'][0]
    assert ann['features'] == expected_node_features(extra=[203] + extra)
1530
1531
def test_hook_chaining(node_factory):
    """Hooks are called in registration order and stop at the first result.

    l2 runs two plugins subscribing to the `htlc_accepted` hook: the `odd`
    plugin only resolves the "AA"*32 preimage, the `even` plugin only the
    "BB"*32 one. A plugin that returns anything other than `continue` must
    terminate the chain (later plugins are not called), and a payment
    neither plugin handles falls through to the internal invoice handling.
    """
    l1, l2 = node_factory.line_graph(2)

    # Start the plugins via RPC rather than on the command line, so their
    # registration (and therefore chaining) order is deterministic.
    plugindir = os.path.dirname(__file__)
    l2.rpc.plugin_start(os.path.join(plugindir, "plugins/hook-chain-odd.py"))
    l2.rpc.plugin_start(os.path.join(plugindir, "plugins/hook-chain-even.py"))

    hash1 = sha256(b'\xAA' * 32).hexdigest()
    hash2 = sha256(b'\xBB' * 32).hexdigest()
    hash3 = sha256(b'\xCC' * 32).hexdigest()

    odd_fmt = r'plugin-hook-chain-odd.py: htlc_accepted called for payment_hash {}'
    even_fmt = r'plugin-hook-chain-even.py: htlc_accepted called for payment_hash {}'

    # Payment 1: resolved by the first (odd) plugin, so the second one
    # must never see it.
    inv = l2.rpc.invoice(123, 'odd', "Odd payment handled by the first plugin",
                         preimage="AA" * 32)['bolt11']
    l1.rpc.pay(inv)
    assert(l2.daemon.is_in_log(odd_fmt.format(hash1)))
    assert(not l2.daemon.is_in_log(even_fmt.format(hash1)))

    # Payment 2: the odd plugin returns `continue`, then the even plugin
    # resolves it — both must have been called.
    inv = l2.rpc.invoice(123, 'even', "Even payment handled by the second plugin",
                         preimage="BB" * 32)['bolt11']
    l1.rpc.pay(inv)
    assert(l2.daemon.is_in_log(odd_fmt.format(hash2)))
    assert(l2.daemon.is_in_log(even_fmt.format(hash2)))

    # Payment 3: neither plugin resolves it; both are consulted and the
    # built-in invoice handling settles it.
    inv = l2.rpc.invoice(123, 'neither', "Neither plugin handles this",
                         preimage="CC" * 32)['bolt11']
    l1.rpc.pay(inv)
    assert(l2.daemon.is_in_log(odd_fmt.format(hash3)))
    assert(l2.daemon.is_in_log(even_fmt.format(hash3)))
1602
1603
def test_bitcoin_backend(node_factory, bitcoind):
    """
    This tests interaction with the Bitcoin backend, but not specifically bcli
    """
    # With bcli disabled, lightningd must get its Bitcoin backend commands
    # from other plugins; startup is expected to fail until they are all
    # provided, hence may_fail/allow_broken_log.
    l1 = node_factory.get_node(start=False, options={"disable-plugin": "bcli"},
                               may_fail=True, allow_broken_log=True)

    # We don't start if we haven't all the required methods registered.
    plugin = os.path.join(os.getcwd(), "tests/plugins/bitcoin/part1.py")
    l1.daemon.opts["plugin"] = plugin
    try:
        l1.daemon.start()
    except ValueError:
        # Expected path: part1.py alone registers only a subset of the
        # backend commands, so startup aborts.
        assert l1.daemon.is_in_log("Missing a Bitcoin plugin command")
        # Now we should start if all the commands are registered, even if they
        # are registered by two distincts plugins.
        del l1.daemon.opts["plugin"]
        l1.daemon.opts["plugin-dir"] = os.path.join(os.getcwd(),
                                                    "tests/plugins/bitcoin/")
        try:
            l1.daemon.start()
        except ValueError:
            # Startup may still abort for other reasons; we only require
            # that all backend commands were seen before the failure.
            msg = "All Bitcoin plugin commands registered"
            assert l1.daemon.is_in_log(msg)
        else:
            # NOTE(review): this branch fires when start() *succeeded*; the
            # message text reads as the opposite — confirm intent upstream.
            raise Exception("We registered all commands but couldn't start!")
    else:
        raise Exception("We could start without all commands registered !!")

    # But restarting with just bcli is ok
    del l1.daemon.opts["plugin-dir"]
    del l1.daemon.opts["disable-plugin"]
    l1.start()
    assert l1.daemon.is_in_log("bitcoin-cli initialized and connected to"
                               " bitcoind")
1639
1640
def test_bcli(node_factory, bitcoind, chainparams):
    """
    This tests the bcli plugin, used to gather Bitcoin data from a local
    bitcoind.
    Mostly sanity checks of the interface..
    """
    l1, l2 = node_factory.get_nodes(2)

    # We can't stop it dynamically
    with pytest.raises(RpcError):
        l1.rpc.plugin_stop("bcli")

    # Failure case of feerate is tested in test_misc.py
    estimates = l1.rpc.call("estimatefees")
    for est in ["opening", "mutual_close", "unilateral_close", "delayed_to_us",
                "htlc_resolution", "penalty", "min_acceptable",
                "max_acceptable"]:
        assert est in estimates

    resp = l1.rpc.call("getchaininfo")
    assert resp["chain"] == chainparams['name']
    for field in ["headercount", "blockcount", "ibd"]:
        assert field in resp

    # We shouldn't get upset if we ask for an unknown-yet block
    resp = l1.rpc.call("getrawblockbyheight", {"height": 500})
    assert resp["blockhash"] is resp["block"] is None
    resp = l1.rpc.call("getrawblockbyheight", {"height": 50})
    # BUGFIX: the second conjunct previously re-checked "blockhash";
    # a known block must return both the hash AND the raw block.
    assert resp["blockhash"] is not None and resp["block"] is not None
    # Some other bitcoind-failure cases for this call are covered in
    # tests/test_misc.py

    # Fund a channel and verify getutxout sees the funding output
    # (P2WSH script, so it starts with OP_0 PUSH32 == "0020").
    l1.fundwallet(10**5)
    l1.connect(l2)
    fc = l1.rpc.fundchannel(l2.info["id"], 10**4 * 3)
    txo = l1.rpc.call("getutxout", {"txid": fc['txid'], "vout": fc['outnum']})
    assert (Millisatoshi(txo["amount"]) == Millisatoshi(10**4 * 3 * 10**3)
            and txo["script"].startswith("0020"))
    l1.rpc.close(l2.info["id"])
    # When output is spent, it should give us null !
    wait_for(lambda: l1.rpc.call("getutxout", {
        "txid": fc['txid'],
        "vout": fc['outnum']
    })['amount'] is None)

    # An undecodable tx must be reported as a failure, not an exception.
    resp = l1.rpc.call("sendrawtransaction", {"tx": "dummy", "allowhighfees": False})
    assert not resp["success"] and "decode failed" in resp["errmsg"]
1688
1689
def test_hook_crash(node_factory, executor, bitcoind):
    """Verify that we fail over if a plugin crashes while handling a hook.

    l1 opens a channel to each of three peers (star topology). Each peer
    runs the two chaining plugins plus hook-crash.py, which crashes on
    purpose as soon as its htlc_accepted hook fires. Whatever position the
    crasher occupies in the chain, the remaining plugins must still be
    called and the payment must still succeed.
    """
    plugin_dir = os.path.dirname(__file__)
    crasher = os.path.join(plugin_dir, "plugins/hook-crash.py")
    odd = os.path.join(plugin_dir, "plugins/hook-chain-odd.py")
    even = os.path.join(plugin_dir, "plugins/hook-chain-even.py")
    orders = [
        (crasher, odd, even),  # Crashing plugin is first in chain
        (odd, crasher, even),  # Crashing plugin is in the middle of the chain
        (odd, even, crasher),  # Crashing plugin is last in chain
    ]

    l1 = node_factory.get_node()
    nodes = [node_factory.get_node() for _ in orders]

    # Register the plugins in each prescribed order, then open a channel
    # from l1 to that node.
    for order, peer in zip(orders, nodes):
        for path in order:
            peer.rpc.plugin_start(path)
        l1.openchannel(peer, 10**6, confirm=False, wait_for_announce=False)

    bitcoind.generate_block(6)

    wait_for(lambda: len(l1.rpc.listchannels()['channels']) == 2 * len(orders))

    # Start an RPC call that should error once the plugin crashes.
    f1 = executor.submit(nodes[0].rpc.hold_rpc_call)

    futures = [
        executor.submit(l1.rpc.pay, peer.rpc.invoice(123, "lbl", "desc")['bolt11'])
        for peer in nodes
    ]

    # Every node must report the crash AND both surviving hook handlers.
    for peer in nodes:
        peer.daemon.wait_for_logs([
            r'Plugin is about to crash.',
            r'plugin-hook-chain-odd.py: htlc_accepted called for payment_hash',
            r'plugin-hook-chain-even.py: htlc_accepted called for payment_hash',
        ])

    # All payments must have succeeded despite the crash.
    for f in futures:
        f.result(TIMEOUT)

    # Make sure the RPC call was terminated with the correct error
    with pytest.raises(RpcError, match=r'Plugin terminated before replying'):
        f1.result(10)
1745
1746
@pytest.mark.openchannel('v1')
@pytest.mark.openchannel('v2')
def test_feature_set(node_factory):
    """The show_feature_set plugin must be handed the node's featureset."""
    plugin = os.path.join(os.path.dirname(__file__), 'plugins/show_feature_set.py')
    l1 = node_factory.get_node(options={"plugin": plugin})

    fs = l1.rpc.call('getfeatureset')

    # init/node/channel must exactly match the expected feature strings;
    # for invoices we only require the key to be present.
    expected = {
        'init': expected_peer_features(),
        'node': expected_node_features(),
        'channel': expected_channel_features(),
    }
    for key, want in expected.items():
        assert fs[key] == want
    assert 'invoice' in fs
1759
1760
def test_replacement_payload(node_factory):
    """Test that htlc_accepted plugin hook can replace payload"""
    plugin = os.path.join(os.path.dirname(__file__), 'plugins/replace_payload.py')
    l1, l2 = node_factory.line_graph(
        2,
        opts=[{}, {"plugin": plugin}],
        wait_for_announce=True
    )

    # Case 1: substitute an outright invalid payload; the recipient must
    # reject the onion.
    l2.rpc.call('setpayload', ['0000'])
    inv = l2.rpc.invoice(123, 'test_replacement_payload', 'test_replacement_payload')['bolt11']
    with pytest.raises(RpcError, match=r"WIRE_INVALID_ONION_PAYLOAD \(reply from remote\)"):
        l1.rpc.pay(inv)

    # Case 2: a well-formed payload whose payment_secret is corrupted must
    # fail with unknown-payment-details.
    l2.rpc.call('setpayload', ['corrupt_secret'])

    with pytest.raises(RpcError, match=r"WIRE_INCORRECT_OR_UNKNOWN_PAYMENT_DETAILS \(reply from remote\)"):
        l1.rpc.pay(inv)

    # "Attept" matches lightningd's actual log text — do not "fix" it here.
    assert l2.daemon.wait_for_log("Attept to pay.*with wrong secret")
1783
1784
@pytest.mark.developer("Requires dev_sign_last_tx")
def test_watchtower(node_factory, bitcoind, directory, chainparams):
    """Test watchtower hook.

    l1 and l2 open a channel, make a couple of updates and then l1 cheats on
    l2 while that one is offline. The watchtower plugin meanwhile stashes all
    the penalty transactions and we release the one matching the offending
    commitment transaction.

    """
    p = os.path.join(os.path.dirname(__file__), "plugins/watchtower.py")
    l1, l2 = node_factory.line_graph(
        2,
        opts=[{'may_fail': True, 'allow_broken_log': True}, {'plugin': p}]
    )
    channel_id = l1.rpc.listpeers()['peers'][0]['channels'][0]['channel_id']

    # Force a new commitment
    l1.rpc.pay(l2.rpc.invoice(25000000, 'lbl1', 'desc1')['bolt11'])

    tx = l1.rpc.dev_sign_last_tx(l2.info['id'])['tx']

    # Now make sure it is out of date
    l1.rpc.pay(l2.rpc.invoice(25000000, 'lbl2', 'desc2')['bolt11'])

    # l2 stops watching the chain, allowing the watchtower to react
    l2.stop()

    # Now l1 cheats
    bitcoind.rpc.sendrawtransaction(tx)
    time.sleep(1)
    bitcoind.generate_block(1)

    wt_file = os.path.join(
        l2.daemon.lightning_dir,
        chainparams['name'],
        'watchtower.csv'
    )

    cheat_tx = bitcoind.rpc.decoderawtransaction(tx)
    lastcommitnum = 0
    # FIX: use a context manager so the CSV handle is closed even when an
    # assertion fails (previously the open() was never closed), and avoid
    # the ambiguous variable name 'l'.
    with open(wt_file, 'r') as wt:
        for line in wt:
            txid, penalty, channel_id_hook, commitnum = line.strip().split(', ')
            # The plugin must have recorded every commitment, in order,
            # all for our channel.
            assert lastcommitnum == int(commitnum)
            assert channel_id_hook == channel_id
            lastcommitnum += 1
            if txid == cheat_tx['txid']:
                # This one should succeed, since it is a response to the cheat_tx
                bitcoind.rpc.sendrawtransaction(penalty)
                break

    # Need this to check that l2 gets the funds
    penalty_meta = bitcoind.rpc.decoderawtransaction(penalty)

    time.sleep(1)
    bitcoind.generate_block(1)

    # Make sure l2's normal penalty_tx doesn't reach the network
    def mock_sendrawtransaction(tx):
        print("NOT broadcasting", tx)

    l2.daemon.rpcproxy.mock_rpc('sendrawtransaction', mock_sendrawtransaction)

    # Restart l2, and it should continue where the watchtower left off:
    l2.start()

    # l2 will still try to broadcast its latest commitment tx, but it'll fail
    # since l1 has cheated. All commitments share the same prefix, so look for
    # that.
    penalty_prefix = tx[:(4 + 1 + 36) * 2]  # version, txin_count, first txin in hex
    l2.daemon.wait_for_log(r'Expected error broadcasting tx {}'.format(penalty_prefix))

    # Now make sure the penalty output ends up in our wallet
    fund_txids = [o['txid'] for o in l2.rpc.listfunds()['outputs']]
    assert(penalty_meta['txid'] in fund_txids)
1860
1861
def test_plugin_fail(node_factory):
    """Test that a plugin which fails (not during a command)"""
    plugin = os.path.join(os.path.dirname(__file__), 'plugins/fail_by_itself.py')
    l1 = node_factory.get_node(options={"plugin": plugin})

    def assert_cleaned_up():
        # Give the plugin time to die, then verify lightningd removed its
        # command and logged the exit.
        time.sleep(2)
        registered = [h['command'] for h in l1.rpc.help()['help']]
        assert 'failcmd' not in registered
        l1.daemon.wait_for_log(r': exited during normal operation')

    # Loaded at startup: the plugin dies on its own and must be cleaned up.
    assert_cleaned_up()

    # Same when started dynamically.
    l1.rpc.plugin_start(plugin)
    assert_cleaned_up()
1877
1878
@pytest.mark.developer("without DEVELOPER=1, gossip v slow")
@pytest.mark.openchannel('v1')
@pytest.mark.openchannel('v2')
def test_coin_movement_notices(node_factory, bitcoind, chainparams):
    """Verify that coin movements are triggered correctly.
    """

    # Expected movements on l2's account for the l1<->l2 channel, in order:
    # funding deposit, routed HTLCs in both directions, an invoice received
    # and one paid, then the closing chain fee and the withdrawal.
    l1_l2_mvts = [
        {'type': 'chain_mvt', 'credit': 0, 'debit': 0, 'tag': 'deposit'},
        {'type': 'channel_mvt', 'credit': 100001001, 'debit': 0, 'tag': 'routed'},
        {'type': 'channel_mvt', 'credit': 0, 'debit': 50000000, 'tag': 'routed'},
        {'type': 'channel_mvt', 'credit': 100000000, 'debit': 0, 'tag': 'invoice'},
        {'type': 'channel_mvt', 'credit': 0, 'debit': 50000000, 'tag': 'invoice'},
        {'type': 'chain_mvt', 'credit': 0, 'debit': 1, 'tag': 'chain_fees'},
        {'type': 'chain_mvt', 'credit': 0, 'debit': 100001000, 'tag': 'withdrawal'},
    ]
    # The exact chain fees and withdrawal amounts differ per backend, so
    # elements, EXPERIMENTAL_FEATURES (anchor outputs) and plain bitcoin
    # each get their own expected tables for the l2<->l3 channel and the
    # l2 wallet account.
    if chainparams['elements']:
        l2_l3_mvts = [
            {'type': 'chain_mvt', 'credit': 1000000000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'channel_mvt', 'credit': 0, 'debit': 100000000, 'tag': 'routed'},
            {'type': 'channel_mvt', 'credit': 50000501, 'debit': 0, 'tag': 'routed'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 4271501, 'tag': 'chain_fees'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 945729000, 'tag': 'withdrawal'},
        ]

        l2_wallet_mvts = [
            {'type': 'chain_mvt', 'credit': 2000000000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 0, 'tag': 'spend_track'},
            # The two withdrawals could appear in either order (nested
            # list means "any order" to check_coin_moves).
            [
                {'type': 'chain_mvt', 'credit': 0, 'debit': 991908000, 'tag': 'withdrawal'},
                {'type': 'chain_mvt', 'credit': 0, 'debit': 1000000000, 'tag': 'withdrawal'},
            ],
            {'type': 'chain_mvt', 'credit': 0, 'debit': 8092000, 'tag': 'chain_fees'},
            {'type': 'chain_mvt', 'credit': 991908000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 100001000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 945729000, 'debit': 0, 'tag': 'deposit'},
        ]
    elif EXPERIMENTAL_FEATURES:
        # option_anchor_outputs
        l2_l3_mvts = [
            {'type': 'chain_mvt', 'credit': 1000000000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'channel_mvt', 'credit': 0, 'debit': 100000000, 'tag': 'routed'},
            {'type': 'channel_mvt', 'credit': 50000501, 'debit': 0, 'tag': 'routed'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 2520501, 'tag': 'chain_fees'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 947480000, 'tag': 'withdrawal'},
        ]

        l2_wallet_mvts = [
            {'type': 'chain_mvt', 'credit': 2000000000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 0, 'tag': 'spend_track'},
            # Could go in either order
            [
                {'type': 'chain_mvt', 'credit': 0, 'debit': 995433000, 'tag': 'withdrawal'},
                {'type': 'chain_mvt', 'credit': 0, 'debit': 1000000000, 'tag': 'withdrawal'},
            ],
            {'type': 'chain_mvt', 'credit': 0, 'debit': 4567000, 'tag': 'chain_fees'},
            {'type': 'chain_mvt', 'credit': 995433000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 100001000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 947480000, 'debit': 0, 'tag': 'deposit'},
        ]
    else:
        l2_l3_mvts = [
            {'type': 'chain_mvt', 'credit': 1000000000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'channel_mvt', 'credit': 0, 'debit': 100000000, 'tag': 'routed'},
            {'type': 'channel_mvt', 'credit': 50000501, 'debit': 0, 'tag': 'routed'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 2520501, 'tag': 'chain_fees'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 947480000, 'tag': 'withdrawal'},
        ]

        l2_wallet_mvts = [
            {'type': 'chain_mvt', 'credit': 2000000000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 0, 'tag': 'spend_track'},
            # Could go in either order
            [
                {'type': 'chain_mvt', 'credit': 0, 'debit': 995433000, 'tag': 'withdrawal'},
                {'type': 'chain_mvt', 'credit': 0, 'debit': 1000000000, 'tag': 'withdrawal'},
            ],
            {'type': 'chain_mvt', 'credit': 0, 'debit': 4567000, 'tag': 'chain_fees'},
            {'type': 'chain_mvt', 'credit': 995433000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 100001000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 947480000, 'debit': 0, 'tag': 'deposit'},
        ]

    # l2 is the node under test; only it runs the coin_movements plugin.
    l1, l2, l3 = node_factory.line_graph(3, opts=[
        {'may_reconnect': True},
        {'may_reconnect': True, 'plugin': os.path.join(os.getcwd(), 'tests/plugins/coin_movements.py')},
        {'may_reconnect': True},
    ], wait_for_announce=True)

    # Special case for dual-funded channel opens
    if l2.config('experimental-dual-fund'):
        # option_anchor_outputs
        l2_l3_mvts = [
            {'type': 'chain_mvt', 'credit': 1000000000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'channel_mvt', 'credit': 0, 'debit': 100000000, 'tag': 'routed'},
            {'type': 'channel_mvt', 'credit': 50000501, 'debit': 0, 'tag': 'routed'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 2520501, 'tag': 'chain_fees'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 947480000, 'tag': 'withdrawal'},
        ]
        l2_wallet_mvts = [
            {'type': 'chain_mvt', 'credit': 2000000000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 0, 'debit': 0, 'tag': 'spend_track'},
            # Could go in either order
            [
                {'type': 'chain_mvt', 'credit': 0, 'debit': 995410000, 'tag': 'withdrawal'},
                {'type': 'chain_mvt', 'credit': 0, 'debit': 1000000000, 'tag': 'withdrawal'},
            ],
            {'type': 'chain_mvt', 'credit': 0, 'debit': 4590000, 'tag': 'chain_fees'},
            {'type': 'chain_mvt', 'credit': 995410000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 100001000, 'debit': 0, 'tag': 'deposit'},
            {'type': 'chain_mvt', 'credit': 947480000, 'debit': 0, 'tag': 'deposit'},
        ]

    bitcoind.generate_block(5)
    wait_for(lambda: len(l1.rpc.listchannels()['channels']) == 4)
    amount = 10**8

    inv = l3.rpc.invoice(amount, "first", "desc")
    payment_hash13 = inv['payment_hash']
    route = l1.rpc.getroute(l3.info['id'], amount, 1)['route']

    # status: offered -> settled
    l1.rpc.sendpay(route, payment_hash13, payment_secret=inv['payment_secret'])
    l1.rpc.waitsendpay(payment_hash13)

    # status: offered -> failed
    route = l1.rpc.getroute(l3.info['id'], amount, 1)['route']
    payment_hash13 = "f" * 64
    with pytest.raises(RpcError):
        l1.rpc.sendpay(route, payment_hash13, payment_secret=inv['payment_secret'])
        l1.rpc.waitsendpay(payment_hash13)

    # go the other direction
    inv = l1.rpc.invoice(amount // 2, "first", "desc")
    payment_hash31 = inv['payment_hash']
    route = l3.rpc.getroute(l1.info['id'], amount // 2, 1)['route']
    l3.rpc.sendpay(route, payment_hash31, payment_secret=inv['payment_secret'])
    l3.rpc.waitsendpay(payment_hash31)

    # receive a payment (endpoint)
    inv = l2.rpc.invoice(amount, "first", "desc")
    payment_hash12 = inv['payment_hash']
    route = l1.rpc.getroute(l2.info['id'], amount, 1)['route']
    l1.rpc.sendpay(route, payment_hash12, payment_secret=inv['payment_secret'])
    l1.rpc.waitsendpay(payment_hash12)

    # send a payment (originator)
    inv = l1.rpc.invoice(amount // 2, "second", "desc")
    payment_hash21 = inv['payment_hash']
    route = l2.rpc.getroute(l1.info['id'], amount // 2, 1)['route']
    l2.rpc.sendpay(route, payment_hash21, payment_secret=inv['payment_secret'])
    l2.rpc.waitsendpay(payment_hash21)

    # restart to test index
    l2.restart()
    wait_for(lambda: all(p['channels'][0]['state'] == 'CHANNELD_NORMAL' for p in l2.rpc.listpeers()['peers']))

    # close the channels down
    chan1 = l2.get_channel_scid(l1)
    chan3 = l2.get_channel_scid(l3)
    chanid_1 = first_channel_id(l2, l1)
    chanid_3 = first_channel_id(l2, l3)

    l2.rpc.close(chan1)
    l2.daemon.wait_for_logs([
        ' to CLOSINGD_COMPLETE',
        'sendrawtx exit 0',
    ])
    assert account_balance(l2, chanid_1) == 100001001
    bitcoind.generate_block(6)
    sync_blockheight(bitcoind, [l2])
    l2.daemon.wait_for_log('{}.*FUNDING_TRANSACTION/FUNDING_OUTPUT->MUTUAL_CLOSE depth'.format(l1.info['id']))

    l2.rpc.close(chan3)
    l2.daemon.wait_for_logs([
        ' to CLOSINGD_COMPLETE',
        'sendrawtx exit 0',
    ])
    assert account_balance(l2, chanid_3) == 950000501
    bitcoind.generate_block(6)
    sync_blockheight(bitcoind, [l2])
    l2.daemon.wait_for_log('{}.*FUNDING_TRANSACTION/FUNDING_OUTPUT->MUTUAL_CLOSE depth'.format(l3.info['id']))

    # Ending channel balance should be zero
    assert account_balance(l2, chanid_1) == 0
    assert account_balance(l2, chanid_3) == 0

    # Verify we recorded all the movements we expect
    check_coin_moves(l2, chanid_1, l1_l2_mvts, chainparams)
    check_coin_moves(l2, chanid_3, l2_l3_mvts, chainparams)
    check_coin_moves(l2, 'wallet', l2_wallet_mvts, chainparams)
    check_coin_moves_idx(l2)
2071
2072
def test_3847_repro(node_factory, bitcoind):
    """Reproduces the issue in #3847: duplicate response from plugin

    l3's hold_htlcs plugin sits on incoming HTLCs until the deadline
    expires and then fails them; either of the held parts should end the
    payment attempt, and the second reply would previously crash the
    `pay` plugin with a redundant result.
    """
    hold_opts = {
        'plugin': os.path.join(os.getcwd(), 'tests/plugins/hold_htlcs.py'),
        'hold-time': 11,
        'hold-result': 'fail',
    }
    l1, l2, l3 = node_factory.line_graph(
        3, opts=[{}, {}, hold_opts], wait_for_announce=True)
    wait_for(lambda: len(l1.rpc.listchannels()['channels']) == 4)

    # Amount sufficient to trigger the presplit modifier
    amt = 20 * 1000 * 1000

    i1 = l3.rpc.invoice(
        msatoshi=amt, label="direct", description="desc"
    )['bolt11']
    with pytest.raises(RpcError):
        l1.rpc.pay(i1, retry_for=10)

    # At least two parts must hit the deadline; the bug would crash the
    # `pay` plugin at this point.
    l1.daemon.wait_for_logs([r'Payment deadline expired, not retrying'] * 2)

    # paystatus is served by the pay plugin itself, so this call fails if
    # the plugin crashed.
    l1.rpc.paystatus(i1)
2108
2109
def test_important_plugin(node_factory):
    """An important plugin that fails to start, or dies later, must take
    lightningd down with it; a disabled one must not."""
    # Cache it here.
    pluginsdir = os.path.join(os.path.dirname(__file__), "plugins")

    # Case 1: a nonexistent important plugin must abort startup.
    n = node_factory.get_node(options={"important-plugin": os.path.join(pluginsdir, "nonexistent")},
                              may_fail=True, expect_fail=True,
                              allow_broken_log=True, start=False)
    n.daemon.start(wait_for_initialized=False, stderr=subprocess.PIPE)
    wait_for(lambda: not n.daemon.running)

    assert n.daemon.is_in_stderr(r"error starting plugin '.*nonexistent'")

    # We use a log file, since our wait_for_log is unreliable when the
    # daemon actually dies.
    def get_logfile_match(logpath, regex):
        # Return the first regex match in the log file, or None if the
        # file does not exist (yet) or nothing matches.
        if not os.path.exists(logpath):
            return None
        with open(logpath, 'r') as f:
            for line in f.readlines():
                m = re.search(regex, line)
                if m is not None:
                    return m
        return None

    logpath = os.path.join(n.daemon.lightning_dir, TEST_NETWORK, 'logfile')
    n.daemon.opts['log-file'] = 'logfile'

    # Check we exit if the important plugin dies.
    n.daemon.opts['important-plugin'] = os.path.join(pluginsdir, "fail_by_itself.py")

    n.daemon.start(wait_for_initialized=False)
    wait_for(lambda: not n.daemon.running)

    assert get_logfile_match(logpath,
                             r'fail_by_itself.py: Plugin marked as important, shutting down lightningd')
    # Remove the log between runs so stale matches can't leak into the
    # next check.
    os.remove(logpath)

    # Check if the important plugin is disabled, we run as normal.
    n.daemon.opts['disable-plugin'] = "fail_by_itself.py"
    del n.daemon.opts['log-file']
    n.daemon.start()
    # Make sure we can call into a plugin RPC (this is from `bcli`) even
    # if fail_by_itself.py is disabled.
    n.rpc.call("estimatefees", {})
    # Make sure we are still running.
    assert n.daemon.running
    n.stop()

    # Check if an important plugin dies later, we fail.
    del n.daemon.opts['disable-plugin']
    n.daemon.opts['log-file'] = 'logfile'
    n.daemon.opts['important-plugin'] = os.path.join(pluginsdir, "suicidal_plugin.py")

    n.daemon.start(wait_for_initialized=False)
    wait_for(lambda: get_logfile_match(logpath, "Server started with public key"))

    # The "die" RPC makes the suicidal plugin kill itself, so the call
    # itself errors out.
    with pytest.raises(RpcError):
        n.rpc.call("die", {})

    wait_for(lambda: not n.daemon.running)
    assert get_logfile_match(logpath, 'suicidal_plugin.py: Plugin marked as important, shutting down lightningd')
    os.remove(logpath)

    # Check that if a builtin plugin dies, we fail.
    n.daemon.start(wait_for_initialized=False)

    # Grab the pay plugin's PID from its startup log line so we can kill it.
    wait_for(lambda: get_logfile_match(logpath, r'.*started\(([0-9]*)\).*plugins/pay'))
    pidstr = get_logfile_match(logpath, r'.*started\(([0-9]*)\).*plugins/pay').group(1)

    # Kill pay.
    os.kill(int(pidstr), signal.SIGKILL)
    wait_for(lambda: not n.daemon.running)

    assert get_logfile_match(logpath, 'pay: Plugin marked as important, shutting down lightningd')
2184
2185
@pytest.mark.developer("tests developer-only option.")
def test_dev_builtin_plugins_unimportant(node_factory):
    """With the dev option set, a builtin plugin may be stopped at runtime."""
    opts = {"dev-builtin-plugins-unimportant": None}
    n = node_factory.get_node(options=opts)
    # `pay` is normally important; stopping it must not raise here.
    n.rpc.plugin_stop(plugin="pay")
2190
2191
def test_htlc_accepted_hook_crash(node_factory, executor):
    """Test that we do not hang incoming HTLCs if the hook plugin crashes.

    Reproduces #3748.
    """
    plugin = os.path.join(os.getcwd(), 'tests/plugins/htlc_accepted-crash.py')
    l1 = node_factory.get_node()
    l2 = node_factory.get_node(options={'plugin': plugin},
                               allow_broken_log=True)
    l1.connect(l2)
    l1.fundchannel(l2)

    bolt11 = l2.rpc.invoice(500, "crashpls", "crashpls")['bolt11']

    # Pay in the background; the hook plugin on l2 crashes while handling
    # the incoming HTLC.
    fut = executor.submit(l1.rpc.pay, bolt11)

    l2.daemon.wait_for_log(r'Crashing on purpose...')
    l2.daemon.wait_for_log(
        r'Hook handler for htlc_accepted failed with an exception.'
    )

    # The HTLC must not be left dangling: the payment fails promptly with
    # a temporary node failure instead of hanging.
    with pytest.raises(RpcError, match=r'failed: WIRE_TEMPORARY_NODE_FAILURE'):
        fut.result(10)
2219
2220
def test_notify(node_factory):
    """Test lightning-cli rendering of plugin progress notifications.

    `make_notify` (from notify.py) emits two progress stages: 100 steps,
    then 10 steps.  At the default log level the CLI draws each stage as
    an in-place progress bar (carriage returns between updates, newline
    on the final step); at `-N debug` a second prompt appears as well;
    with `--notifications=none` all progress output is suppressed.
    """
    plugins = [os.path.join(os.getcwd(), 'tests/plugins/notify.py'),
               os.path.join(os.getcwd(), 'tests/plugins/notify2.py')]
    l1 = node_factory.get_node(options={'plugin': plugins})

    # Direct RPC calls (and plugin-to-plugin passthrough) still return
    # the final result regardless of notifications.
    assert l1.rpc.call('make_notify') == 'This worked'
    assert l1.rpc.call('call_make_notify') == 'This worked'

    # keepends=True so we can distinguish '\r' (bar redraw) from '\n'
    # (bar finished) in the CLI output.
    out = subprocess.check_output(['cli/lightning-cli',
                                   '--network={}'.format(TEST_NETWORK),
                                   '--lightning-dir={}'
                                   .format(l1.daemon.lightning_dir),
                                   'make_notify']).decode('utf-8').splitlines(keepends=True)
    assert out[0] == '# Beginning stage 1\n'
    assert out[1] == '\r'
    # Stage 1: 100 updates; each redraws the bar in place except the
    # last, which ends the line.
    for i in range(100):
        assert out[2 + i].startswith("# Stage 1/2 {:>3}/100 |".format(1 + i))
        if i == 99:
            assert out[2 + i].endswith("|\n")
        else:
            assert out[2 + i].endswith("|\r")

    assert out[102] == '# Beginning stage 2\n'
    assert out[103] == '\r'

    # Stage 2: 10 updates, same redraw pattern.
    for i in range(10):
        assert out[104 + i].startswith("# Stage 2/2 {:>2}/10 |".format(1 + i))
        if i == 9:
            assert out[104 + i].endswith("|\n")
        else:
            assert out[104 + i].endswith("|\r")
    # Final RPC result, JSON-encoded, then nothing else.
    assert out[114] == '"This worked"\n'
    assert len(out) == 115

    # At debug level, we get the second prompt.
    out = subprocess.check_output(['cli/lightning-cli',
                                   '--network={}'.format(TEST_NETWORK),
                                   '--lightning-dir={}'
                                   .format(l1.daemon.lightning_dir),
                                   '-N', 'debug',
                                   'make_notify']).decode('utf-8').splitlines()
    assert out[0] == '# Beginning stage 1'
    assert out[1] == ''
    for i in range(100):
        assert out[2 + i].startswith("# Stage 1/2 {:>3}/100 |".format(1 + i))
        assert out[2 + i].endswith("|")
    assert out[102] == '# Beginning stage 2'
    assert out[103] == ''
    for i in range(10):
        assert out[104 + i].startswith("# Stage 2/2 {:>2}/10 |".format(1 + i))
        assert out[104 + i].endswith("|")
    assert out[114] == '"This worked"'
    assert len(out) == 115

    # --notifications=none suppresses all progress output; only the
    # result remains.
    out = subprocess.check_output(['cli/lightning-cli',
                                   '--network={}'.format(TEST_NETWORK),
                                   '--lightning-dir={}'
                                   .format(l1.daemon.lightning_dir),
                                   '--notifications=none',
                                   'make_notify']).decode('utf-8').splitlines()
    assert out == ['"This worked"']
2284
2285
def test_htlc_accepted_hook_failcodes(node_factory):
    """Exercise both failure-return styles of the htlc_accepted hook."""
    hook_plugin = os.path.join(os.path.dirname(__file__), 'plugins/htlc_accepted-failcode.py')
    l1, l2 = node_factory.line_graph(2, opts=[{}, {'plugin': hook_plugin}])

    # First let's test the newer failure_message, which should get passed
    # through without being mapped.
    cases = {
        '2002': 'WIRE_TEMPORARY_NODE_FAILURE',
        '400F' + '00' * 12: 'WIRE_INCORRECT_OR_UNKNOWN_PAYMENT_DETAILS',
        '4009': 'WIRE_REQUIRED_CHANNEL_FEATURE_MISSING',
        '4016' + '00' * 3: 'WIRE_INVALID_ONION_PAYLOAD',
    }

    for failmsg, expected in cases.items():
        l2.rpc.setfailcode(msg=failmsg)
        inv = l2.rpc.invoice(42, 'failmsg{}'.format(failmsg), '')['bolt11']
        with pytest.raises(RpcError, match=r'failcodename.: .{}.'.format(expected)):
            l1.rpc.pay(inv)

    # And now the older failcode return value. Deprecated; removable once
    # the failcode correction code in peer_htlcs.c is gone. These get
    # remapped to WIRE_TEMPORARY_NODE_FAILURE.
    cases.update({
        '400F': 'WIRE_TEMPORARY_NODE_FAILURE',
        '4009': 'WIRE_TEMPORARY_NODE_FAILURE',
        '4016': 'WIRE_TEMPORARY_NODE_FAILURE',
    })

    for failcode, expected in cases.items():
        # Only the short, message-less codes apply to the failcode style.
        if len(failcode) > 4:
            continue

        l2.rpc.setfailcode(code=failcode)
        inv = l2.rpc.invoice(42, 'failcode{}'.format(failcode), '')['bolt11']
        with pytest.raises(RpcError, match=r'failcodename.: .{}.'.format(expected)):
            l1.rpc.pay(inv)
2323
2324
def test_hook_dep(node_factory):
    """Hook-ordering dependencies between plugins are honoured.

    dep_a declares it must run before dep_b; dep_c introduces a cyclic
    dependency and therefore cannot be loaded alongside the others.
    Plugins that name unknown peers in their dependencies only produce a
    warning.
    """
    dep_a = os.path.join(os.path.dirname(__file__), 'plugins/dep_a.py')
    dep_b = os.path.join(os.path.dirname(__file__), 'plugins/dep_b.py')
    dep_c = os.path.join(os.path.dirname(__file__), 'plugins/dep_c.py')
    l1, l2, l3 = node_factory.line_graph(3, opts=[{},
                                                  {'plugin': dep_b},
                                                  {'plugin': [dep_a, dep_b]}])

    # l2 complains about the two unknown plugins, only.
    # (Could be already past)
    l2.daemon.logsearch_start = 0
    l2.daemon.wait_for_logs(["unknown plugin dep_a.py",
                             "unknown plugin dep_c.py"])
    # Negative lookahead: no "unknown plugin" line for anything else.
    assert not l2.daemon.is_in_log("unknown plugin (?!dep_a.py|dep_c.py)")
    # Remember where we are, so later searches only see new lines.
    logstart = l2.daemon.logsearch_start

    # l3 complains about the dep_c, only.
    assert l3.daemon.is_in_log("unknown plugin dep_c.py")
    assert not l3.daemon.is_in_log("unknown plugin (?!dep_c.py)")

    # A says it has to be before B.
    l2.rpc.plugin_start(plugin=dep_a)
    l2.daemon.wait_for_log(r"started.*dep_a.py")
    # Still doesn't know about c.
    assert l2.daemon.is_in_log("unknown plugin dep_c.py", logstart)

    l1.pay(l2, 100000)
    # They must be called in this order!
    l2.daemon.wait_for_log(r"dep_a.py: htlc_accepted called")
    l2.daemon.wait_for_log(r"dep_b.py: htlc_accepted called")

    # But depc will not load, due to cyclical dep
    with pytest.raises(RpcError, match=r'Cannot meet required hook dependencies'):
        l2.rpc.plugin_start(plugin=dep_c)

    # On l1 (which has neither dep_a nor dep_b) dep_c loads fine.
    l1.rpc.plugin_start(plugin=dep_c)
    l1.daemon.wait_for_log(r"started.*dep_c.py")

    # Complaints about unknown plugin a, but nothing else
    assert l1.daemon.is_in_log("unknown plugin dep_a.py")
    assert not l1.daemon.is_in_log("unknown plugin (?!dep_a.py)")
2365    assert not l1.daemon.is_in_log("unknown plugin (?!dep_a.py)")
2366
2367
def test_hook_dep_stable(node_factory):
    """Hook call order is stable when dependencies permit.

    Load in order A, D, E, B.  A says it has to be before B, D says it
    has to be before E.  Both constraints are already satisfied by the
    load order, so the hooks must be called in exactly that order.
    """
    dep_a = os.path.join(os.path.dirname(__file__), 'plugins/dep_a.py')
    dep_b = os.path.join(os.path.dirname(__file__), 'plugins/dep_b.py')
    dep_d = os.path.join(os.path.dirname(__file__), 'plugins/dep_d.py')
    dep_e = os.path.join(os.path.dirname(__file__), 'plugins/dep_e.py')
    l1, l2 = node_factory.line_graph(2, opts=[{},
                                              {'plugin': [dep_a, dep_d, dep_e, dep_b]}])

    # dep_a mentions dep_c, but nothing else should be unknown.
    # (Could be already past)
    l2.daemon.logsearch_start = 0
    l2.daemon.wait_for_log("unknown plugin dep_c.py")
    # Fixed regex: it used to be `(?!|dep_c.py)`, whose empty alternative
    # made the negative lookahead always fail, so the pattern could never
    # match and this assertion was vacuously true.
    assert not l2.daemon.is_in_log("unknown plugin (?!dep_c.py)")

    l1.pay(l2, 100000)
    # They must be called in this order!
    l2.daemon.wait_for_log(r"dep_a.py: htlc_accepted called")
    l2.daemon.wait_for_log(r"dep_d.py: htlc_accepted called")
    l2.daemon.wait_for_log(r"dep_e.py: htlc_accepted called")
    l2.daemon.wait_for_log(r"dep_b.py: htlc_accepted called")
2392
2393
def test_htlc_accepted_hook_failonion(node_factory):
    """A hook returning an invalid failure onion must still fail the pay."""
    onion_plugin = os.path.join(os.path.dirname(__file__), 'plugins/htlc_accepted-failonion.py')
    sender, receiver = node_factory.line_graph(2, opts=[{}, {'plugin': onion_plugin}])

    # 292 zero bytes, hex-encoded: not a parseable onion reply.
    receiver.rpc.setfailonion('00' * 292)
    bolt11 = receiver.rpc.invoice(42, 'failonion000', '')['bolt11']
    with pytest.raises(RpcError):
        sender.rpc.pay(bolt11)
2403
2404
def test_dynamic_args(node_factory):
    """Options passed to a dynamically started plugin are parsed and applied."""
    plugin_path = os.path.join(os.getcwd(), 'contrib/plugins/helloworld.py')

    l1 = node_factory.get_node()
    l1.rpc.plugin_start(plugin_path, greeting='Test arg parsing')

    # The option value must show up both in behavior and in listconfigs.
    assert l1.rpc.call("hello") == "Test arg parsing world"
    entries = [p for p in l1.rpc.listconfigs()['plugins'] if p['path'] == plugin_path]
    assert only_one(entries)['options']['greeting'] == 'Test arg parsing'

    # After stopping, the plugin disappears from the config listing.
    l1.rpc.plugin_stop(plugin_path)
    assert [p for p in l1.rpc.listconfigs()['plugins'] if p['path'] == plugin_path] == []
2418
2419
def test_pyln_request_notify(node_factory):
    """Test that pyln-client plugins can send notifications."""
    plugin_path = os.path.join(
        os.path.dirname(__file__), 'plugins/countdown.py'
    )
    l1 = node_factory.get_node(options={'plugin': plugin_path})
    received = []

    def on_notification(*args, message, **kwargs):
        print("Got a notification:", message)
        received.append(message)

    # With the notify context manager active we collect every message.
    with l1.rpc.notify(on_notification):
        l1.rpc.countdown(10)

    assert received == ['{}/10'.format(i) for i in range(10)]

    # Calling without the context manager we should not get any notifications
    received = []
    l1.rpc.countdown(10)
    assert received == []
2443
2444
def test_self_disable(node_factory):
    """Test that plugin can disable itself without penalty.

    Even an important-plugin may disable itself (at getmanifest or at
    init) without taking lightningd down.
    """
    # Disables in response to getmanifest.
    p1 = os.path.join(
        os.path.dirname(__file__), 'plugins/test_selfdisable_after_getmanifest'
    )
    # Disables in response to init.
    p2 = os.path.join(os.getcwd(), "tests/plugins/test_libplugin")
    # Python plugin disabling itself at init via an option.
    pydisable = os.path.join(
        os.path.dirname(__file__), 'plugins/selfdisable.py'
    )

    l1 = node_factory.get_node(options={'important-plugin': [p1, p2],
                                        'plugin': pydisable,
                                        'selfdisable': None})

    # Disabling may have been logged before the watcher was set up.
    l1.daemon.logsearch_start = 0
    l1.daemon.wait_for_logs(['test_selfdisable_after_getmanifest: .* disabled itself: Self-disable test after getmanifest',
                             'test_libplugin: .* disabled itself at init: Disabled via selfdisable option',
                             'selfdisable.py: .* disabled itself at init: init saying disable'])

    # None of the three may remain in the active plugin list.
    active = [p['name'] for p in l1.rpc.plugin_list()['plugins']]
    assert p1 not in active
    assert p2 not in active
    assert pydisable not in active

    # Dynamic load attempts hit the same self-disable paths.
    with pytest.raises(RpcError, match="Self-disable test after getmanifest"):
        l1.rpc.plugin_start(p1)

    with pytest.raises(RpcError, match="Disabled via selfdisable option"):
        l1.rpc.plugin_start(p2, selfdisable=True)
2479
2480
def test_custom_notification_topics(node_factory):
    """Plugins can declare and emit custom notification topics.

    Announced topics are forwarded to subscribers; unannounced topics
    are dropped without killing the emitting plugin.
    """
    plugin = os.path.join(
        os.path.dirname(__file__), "plugins", "custom_notifications.py"
    )
    l1, l2 = node_factory.line_graph(2, opts=[{'plugin': plugin}, {}])
    l1.rpc.emit()
    l1.daemon.wait_for_log(r'Got a custom notification Hello world')

    # Builtin topics (pay_success from the pay plugin) also reach us.
    inv = l2.rpc.invoice(42, "lbl", "desc")['bolt11']
    l1.rpc.pay(inv)

    l1.daemon.wait_for_log(r'Got a pay_success notification from plugin pay for payment_hash [0-9a-f]{64}')

    # And now make sure that we drop unannounced notifications
    l1.rpc.faulty_emit()
    l1.daemon.wait_for_log(
        r"Plugin attempted to send a notification to topic .* not forwarding"
    )
    # Give the (dropped) notification a moment to have arrived if it
    # were going to.
    time.sleep(1)
    assert not l1.daemon.is_in_log(r'Got the ididntannouncethis event')

    # The plugin just did what previously was a fatal mistake (emitting
    # an unknown notification); make sure we didn't kill it.
    assert 'custom_notifications.py' in [p['name'] for p in l1.rpc.listconfigs()['plugins']]
2505
2506
def test_restart_on_update(node_factory):
    """Tests if plugin rescan restarts modified plugins.

    Writes a trivial plugin into the node's own plugin directory, then
    rewrites it and checks that `plugin_rescan` detects the change and
    restarts the plugin (but leaves an unchanged file alone).
    """
    # Plugin source template; %s is substituted so each version of the
    # file logs a distinguishable marker.
    content = """#!/usr/bin/env python3
from pyln.client import Plugin
import time
plugin = Plugin()
@plugin.init()
def init(options, configuration, plugin):
    plugin.log("test_restart_on_update %s")
plugin.run()
    """

    # get a node that is not started so we can put a plugin in its lightning_dir
    n = node_factory.get_node(start=False)
    lndir = n.daemon.lightning_dir

    os.makedirs(os.path.join(lndir, 'plugins'), exist_ok=True)
    path = os.path.join(lndir, 'plugins', 'test_restart_on_update.py')

    def write_plugin(version):
        """Write the plugin (executable) logging the given version marker."""
        with open(path, 'w') as f:
            f.write(content % version)
        os.chmod(path, os.stat(path).st_mode | stat.S_IEXEC)

    write_plugin("1")

    # now fire up the node and wait for the plugin to print hello
    n.daemon.start()
    n.daemon.logsearch_start = 0
    n.daemon.wait_for_log(r"test_restart_on_update 1")

    # a rescan should not yet reload the plugin on the same file
    n.rpc.plugin_rescan()
    assert not n.daemon.is_in_log(r"Plugin changed, needs restart.")

    # modify the file, then rescan: the change must be detected and the
    # new version started.
    write_plugin("2")
    n.rpc.plugin_rescan()
    n.daemon.wait_for_log(r"Plugin changed, needs restart.")
    n.daemon.wait_for_log(r"test_restart_on_update 2")
    n.stop()
2553
2554
def test_plugin_shutdown(node_factory):
    """test 'shutdown' notification"""
    plugin = os.path.join(os.getcwd(), "tests/plugins/test_libplugin")
    l1 = node_factory.get_node(options={'plugin': plugin})

    # A well-behaved plugin gets the notification and exits on its own.
    l1.rpc.plugin_stop(plugin)
    l1.daemon.wait_for_log(r"test_libplugin: shutdown called")
    # FIXME: clean this up!
    l1.daemon.wait_for_log(r"test_libplugin: Killing plugin: exited during normal operation")

    # A plugin that ignores the notification is killed after a timeout.
    l1.rpc.plugin_start(plugin, dont_shutdown=True)
    l1.rpc.plugin_stop(plugin)
    l1.daemon.wait_for_log(r"test_libplugin: shutdown called")
    l1.daemon.wait_for_log(r"test_libplugin: Timeout on shutdown: killing anyway")

    # lightningd shutdown should also deliver the notification.
    l1.rpc.plugin_start(plugin)
    l1.rpc.stop()
    l1.daemon.wait_for_log(r"test_libplugin: shutdown called")
2575