1#!/usr/bin/env python3 2# Copyright (c) 2014-2016 The Bitcoin Core developers 3# Distributed under the MIT software license, see the accompanying 4# file COPYING or http://www.opensource.org/licenses/mit-license.php. 5 6 7# 8# Helpful routines for regression testing 9# 10 11import os 12import sys 13 14from binascii import hexlify, unhexlify 15from base64 import b64encode 16from decimal import Decimal, ROUND_DOWN 17import json 18import http.client 19import random 20import shutil 21import subprocess 22import time 23import re 24import errno 25 26from . import coverage 27from .authproxy import AuthServiceProxy, JSONRPCException 28 29COVERAGE_DIR = None 30 31# The maximum number of nodes a single test can spawn 32MAX_NODES = 8 33# Don't assign rpc or p2p ports lower than this 34PORT_MIN = 11000 35# The number of ports to "reserve" for p2p and rpc, each 36PORT_RANGE = 5000 37 38BITCOIND_PROC_WAIT_TIMEOUT = 60 39 40 41class PortSeed: 42 # Must be initialized with a unique integer for each process 43 n = None 44 45#Set Mocktime default to OFF. 46#MOCKTIME is only needed for scripts that use the 47#cached version of the blockchain. If the cached 48#version of the blockchain is used without MOCKTIME 49#then the mempools will not sync due to IBD. 50MOCKTIME = 0 51 52def enable_mocktime(): 53 #For backwared compatibility of the python scripts 54 #with previous versions of the cache, set MOCKTIME 55 #to Jan 1, 2014 + (201 * 10 * 60) 56 global MOCKTIME 57 MOCKTIME = 1388534400 + (201 * 10 * 60) 58 59def disable_mocktime(): 60 global MOCKTIME 61 MOCKTIME = 0 62 63def get_mocktime(): 64 return MOCKTIME 65 66def enable_coverage(dirname): 67 """Maintain a log of which RPC calls are made during testing.""" 68 global COVERAGE_DIR 69 COVERAGE_DIR = dirname 70 71 72def get_rpc_proxy(url, node_number, timeout=None): 73 """ 74 Args: 75 url (str): URL of the RPC server to call 76 node_number (int): the node number (or id) that this calls to 77 78 Kwargs: 79 timeout (int): HTTP timeout in seconds 80 81 Returns: 82 AuthServiceProxy. convenience object for making RPC calls. 83 84 """ 85 proxy_kwargs = {} 86 if timeout is not None: 87 proxy_kwargs['timeout'] = timeout 88 89 proxy = AuthServiceProxy(url, **proxy_kwargs) 90 proxy.url = url # store URL on proxy for info 91 92 coverage_logfile = coverage.get_filename( 93 COVERAGE_DIR, node_number) if COVERAGE_DIR else None 94 95 return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile) 96 97 98def p2p_port(n): 99 assert(n <= MAX_NODES) 100 return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) 101 102def rpc_port(n): 103 return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) 104 105def check_json_precision(): 106 """Make sure json library being used does not lose precision converting BTC values""" 107 n = Decimal("20000000.00000003") 108 satoshis = int(json.loads(json.dumps(float(n)))*1.0e8) 109 if satoshis != 2000000000000003: 110 raise RuntimeError("JSON encode/decode loses precision") 111 112def count_bytes(hex_string): 113 return len(bytearray.fromhex(hex_string)) 114 115def bytes_to_hex_str(byte_str): 116 return hexlify(byte_str).decode('ascii') 117 118def hex_str_to_bytes(hex_str): 119 return unhexlify(hex_str.encode('ascii')) 120 121def str_to_b64str(string): 122 return b64encode(string.encode('utf-8')).decode('ascii') 123 124def sync_blocks(rpc_connections, wait=1, timeout=60): 125 """ 126 Wait until everybody has the same tip 127 """ 128 while timeout > 0: 129 tips = [ x.getbestblockhash() for x in rpc_connections ] 130 if tips == [ tips[0] ]*len(tips): 131 return True 132 time.sleep(wait) 133 timeout -= wait 134 raise AssertionError("Block sync failed") 135 136def sync_mempools(rpc_connections, wait=1, timeout=60): 137 """ 138 Wait until everybody has the same transactions in their memory 139 pools 140 """ 141 while timeout > 0: 142 pool = set(rpc_connections[0].getrawmempool()) 143 num_match = 1 144 for i in range(1, len(rpc_connections)): 145 if set(rpc_connections[i].getrawmempool()) == pool: 146 num_match = num_match+1 147 if num_match == len(rpc_connections): 148 return True 149 time.sleep(wait) 150 timeout -= wait 151 raise AssertionError("Mempool sync failed") 152 153bitcoind_processes = {} 154 155def initialize_datadir(dirname, n): 156 datadir = os.path.join(dirname, "node"+str(n)) 157 if not os.path.isdir(datadir): 158 os.makedirs(datadir) 159 rpc_u, rpc_p = rpc_auth_pair(n) 160 with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f: 161 f.write("regtest=1\n") 162 f.write("rpcuser=" + rpc_u + "\n") 163 f.write("rpcpassword=" + rpc_p + "\n") 164 f.write("port="+str(p2p_port(n))+"\n") 165 f.write("rpcport="+str(rpc_port(n))+"\n") 166 f.write("listenonion=0\n") 167 return datadir 168 169def rpc_auth_pair(n): 170 return 'rpcuser' + str(n), 'rpcpass' + str(n) 171 172def rpc_url(i, rpchost=None): 173 rpc_u, rpc_p = rpc_auth_pair(i) 174 host = '127.0.0.1' 175 port = rpc_port(i) 176 if rpchost: 177 parts = rpchost.split(':') 178 if len(parts) == 2: 179 host, port = parts 180 else: 181 host = rpchost 182 return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port)) 183 184def wait_for_bitcoind_start(process, url, i): 185 ''' 186 Wait for bitcoind to start. This means that RPC is accessible and fully initialized. 187 Raise an exception if bitcoind exits during initialization. 188 ''' 189 while True: 190 if process.poll() is not None: 191 raise Exception('bitcoind exited with status %i during initialization' % process.returncode) 192 try: 193 rpc = get_rpc_proxy(url, i) 194 blocks = rpc.getblockcount() 195 break # break out of loop on success 196 except IOError as e: 197 if e.errno != errno.ECONNREFUSED: # Port not yet open? 198 raise # unknown IO error 199 except JSONRPCException as e: # Initialization phase 200 if e.error['code'] != -28: # RPC in warmup? 201 raise # unkown JSON RPC exception 202 time.sleep(0.25) 203 204def initialize_chain(test_dir, num_nodes): 205 """ 206 Create a cache of a 200-block-long chain (with wallet) for MAX_NODES 207 Afterward, create num_nodes copies from the cache 208 """ 209 210 assert num_nodes <= MAX_NODES 211 create_cache = False 212 for i in range(MAX_NODES): 213 if not os.path.isdir(os.path.join('cache', 'node'+str(i))): 214 create_cache = True 215 break 216 217 if create_cache: 218 219 #find and delete old cache directories if any exist 220 for i in range(MAX_NODES): 221 if os.path.isdir(os.path.join("cache","node"+str(i))): 222 shutil.rmtree(os.path.join("cache","node"+str(i))) 223 224 # Create cache directories, run bitcoinds: 225 for i in range(MAX_NODES): 226 datadir=initialize_datadir("cache", i) 227 args = [ os.getenv("BITCOIND", "bitcoind"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ] 228 if i > 0: 229 args.append("-connect=127.0.0.1:"+str(p2p_port(0))) 230 bitcoind_processes[i] = subprocess.Popen(args) 231 if os.getenv("PYTHON_DEBUG", ""): 232 print("initialize_chain: bitcoind started, waiting for RPC to come up") 233 wait_for_bitcoind_start(bitcoind_processes[i], rpc_url(i), i) 234 if os.getenv("PYTHON_DEBUG", ""): 235 print("initialize_chain: RPC succesfully started") 236 237 rpcs = [] 238 for i in range(MAX_NODES): 239 try: 240 rpcs.append(get_rpc_proxy(rpc_url(i), i)) 241 except: 242 sys.stderr.write("Error connecting to "+url+"\n") 243 sys.exit(1) 244 245 # Create a 200-block-long chain; each of the 4 first nodes 246 # gets 25 mature blocks and 25 immature. 247 # Note: To preserve compatibility with older versions of 248 # initialize_chain, only 4 nodes will generate coins. 249 # 250 # blocks are created with timestamps 10 minutes apart 251 # starting from 2010 minutes in the past 252 enable_mocktime() 253 block_time = get_mocktime() - (201 * 10 * 60) 254 for i in range(2): 255 for peer in range(4): 256 for j in range(25): 257 set_node_times(rpcs, block_time) 258 rpcs[peer].generate(1) 259 block_time += 10*60 260 # Must sync before next peer starts generating blocks 261 sync_blocks(rpcs) 262 263 # Shut them down, and clean up cache directories: 264 stop_nodes(rpcs) 265 disable_mocktime() 266 for i in range(MAX_NODES): 267 os.remove(log_filename("cache", i, "debug.log")) 268 os.remove(log_filename("cache", i, "db.log")) 269 os.remove(log_filename("cache", i, "peers.dat")) 270 os.remove(log_filename("cache", i, "fee_estimates.dat")) 271 272 for i in range(num_nodes): 273 from_dir = os.path.join("cache", "node"+str(i)) 274 to_dir = os.path.join(test_dir, "node"+str(i)) 275 shutil.copytree(from_dir, to_dir) 276 initialize_datadir(test_dir, i) # Overwrite port/rpcport in bitcoin.conf 277 278def initialize_chain_clean(test_dir, num_nodes): 279 """ 280 Create an empty blockchain and num_nodes wallets. 281 Useful if a test case wants complete control over initialization. 282 """ 283 for i in range(num_nodes): 284 datadir=initialize_datadir(test_dir, i) 285 286 287def _rpchost_to_args(rpchost): 288 '''Convert optional IP:port spec to rpcconnect/rpcport args''' 289 if rpchost is None: 290 return [] 291 292 match = re.match('(\[[0-9a-fA-f:]+\]|[^:]+)(?::([0-9]+))?$', rpchost) 293 if not match: 294 raise ValueError('Invalid RPC host spec ' + rpchost) 295 296 rpcconnect = match.group(1) 297 rpcport = match.group(2) 298 299 if rpcconnect.startswith('['): # remove IPv6 [...] wrapping 300 rpcconnect = rpcconnect[1:-1] 301 302 rv = ['-rpcconnect=' + rpcconnect] 303 if rpcport: 304 rv += ['-rpcport=' + rpcport] 305 return rv 306 307def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None): 308 """ 309 Start a bitcoind and return RPC connection to it 310 """ 311 datadir = os.path.join(dirname, "node"+str(i)) 312 if binary is None: 313 binary = os.getenv("BITCOIND", "bitcoind") 314 args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-mocktime="+str(get_mocktime()) ] 315 if extra_args is not None: args.extend(extra_args) 316 bitcoind_processes[i] = subprocess.Popen(args) 317 if os.getenv("PYTHON_DEBUG", ""): 318 print("start_node: bitcoind started, waiting for RPC to come up") 319 url = rpc_url(i, rpchost) 320 wait_for_bitcoind_start(bitcoind_processes[i], url, i) 321 if os.getenv("PYTHON_DEBUG", ""): 322 print("start_node: RPC succesfully started") 323 proxy = get_rpc_proxy(url, i, timeout=timewait) 324 325 if COVERAGE_DIR: 326 coverage.write_all_rpc_commands(COVERAGE_DIR, proxy) 327 328 return proxy 329 330def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None): 331 """ 332 Start multiple bitcoinds, return RPC connections to them 333 """ 334 if extra_args is None: extra_args = [ None for _ in range(num_nodes) ] 335 if binary is None: binary = [ None for _ in range(num_nodes) ] 336 rpcs = [] 337 try: 338 for i in range(num_nodes): 339 rpcs.append(start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i])) 340 except: # If one node failed to start, stop the others 341 stop_nodes(rpcs) 342 raise 343 return rpcs 344 345def log_filename(dirname, n_node, logname): 346 return os.path.join(dirname, "node"+str(n_node), "regtest", logname) 347 348def stop_node(node, i): 349 try: 350 node.stop() 351 except http.client.CannotSendRequest as e: 352 print("WARN: Unable to stop node: " + repr(e)) 353 bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) 354 del bitcoind_processes[i] 355 356def stop_nodes(nodes): 357 for node in nodes: 358 try: 359 node.stop() 360 except http.client.CannotSendRequest as e: 361 print("WARN: Unable to stop node: " + repr(e)) 362 del nodes[:] # Emptying array closes connections as a side effect 363 wait_bitcoinds() 364 365def set_node_times(nodes, t): 366 for node in nodes: 367 node.setmocktime(t) 368 369def wait_bitcoinds(): 370 # Wait for all bitcoinds to cleanly exit 371 for bitcoind in bitcoind_processes.values(): 372 bitcoind.wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) 373 bitcoind_processes.clear() 374 375def connect_nodes(from_connection, node_num): 376 ip_port = "127.0.0.1:"+str(p2p_port(node_num)) 377 from_connection.addnode(ip_port, "onetry") 378 # poll until version handshake complete to avoid race conditions 379 # with transaction relaying 380 while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()): 381 time.sleep(0.1) 382 383def connect_nodes_bi(nodes, a, b): 384 connect_nodes(nodes[a], b) 385 connect_nodes(nodes[b], a) 386 387def find_output(node, txid, amount): 388 """ 389 Return index to output of txid with value amount 390 Raises exception if there is none. 391 """ 392 txdata = node.getrawtransaction(txid, 1) 393 for i in range(len(txdata["vout"])): 394 if txdata["vout"][i]["value"] == amount: 395 return i 396 raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount))) 397 398 399def gather_inputs(from_node, amount_needed, confirmations_required=1): 400 """ 401 Return a random set of unspent txouts that are enough to pay amount_needed 402 """ 403 assert(confirmations_required >=0) 404 utxo = from_node.listunspent(confirmations_required) 405 random.shuffle(utxo) 406 inputs = [] 407 total_in = Decimal("0.00000000") 408 while total_in < amount_needed and len(utxo) > 0: 409 t = utxo.pop() 410 total_in += t["amount"] 411 inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } ) 412 if total_in < amount_needed: 413 raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in)) 414 return (total_in, inputs) 415 416def make_change(from_node, amount_in, amount_out, fee): 417 """ 418 Create change output(s), return them 419 """ 420 outputs = {} 421 amount = amount_out+fee 422 change = amount_in - amount 423 if change > amount*2: 424 # Create an extra change output to break up big inputs 425 change_address = from_node.getnewaddress() 426 # Split change in two, being careful of rounding: 427 outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) 428 change = amount_in - amount - outputs[change_address] 429 if change > 0: 430 outputs[from_node.getnewaddress()] = change 431 return outputs 432 433def send_zeropri_transaction(from_node, to_node, amount, fee): 434 """ 435 Create&broadcast a zero-priority transaction. 436 Returns (txid, hex-encoded-txdata) 437 Ensures transaction is zero-priority by first creating a send-to-self, 438 then using its output 439 """ 440 441 # Create a send-to-self with confirmed inputs: 442 self_address = from_node.getnewaddress() 443 (total_in, inputs) = gather_inputs(from_node, amount+fee*2) 444 outputs = make_change(from_node, total_in, amount+fee, fee) 445 outputs[self_address] = float(amount+fee) 446 447 self_rawtx = from_node.createrawtransaction(inputs, outputs) 448 self_signresult = from_node.signrawtransaction(self_rawtx) 449 self_txid = from_node.sendrawtransaction(self_signresult["hex"], True) 450 451 vout = find_output(from_node, self_txid, amount+fee) 452 # Now immediately spend the output to create a 1-input, 1-output 453 # zero-priority transaction: 454 inputs = [ { "txid" : self_txid, "vout" : vout } ] 455 outputs = { to_node.getnewaddress() : float(amount) } 456 457 rawtx = from_node.createrawtransaction(inputs, outputs) 458 signresult = from_node.signrawtransaction(rawtx) 459 txid = from_node.sendrawtransaction(signresult["hex"], True) 460 461 return (txid, signresult["hex"]) 462 463def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants): 464 """ 465 Create a random zero-priority transaction. 466 Returns (txid, hex-encoded-transaction-data, fee) 467 """ 468 from_node = random.choice(nodes) 469 to_node = random.choice(nodes) 470 fee = min_fee + fee_increment*random.randint(0,fee_variants) 471 (txid, txhex) = send_zeropri_transaction(from_node, to_node, amount, fee) 472 return (txid, txhex, fee) 473 474def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants): 475 """ 476 Create a random transaction. 477 Returns (txid, hex-encoded-transaction-data, fee) 478 """ 479 from_node = random.choice(nodes) 480 to_node = random.choice(nodes) 481 fee = min_fee + fee_increment*random.randint(0,fee_variants) 482 483 (total_in, inputs) = gather_inputs(from_node, amount+fee) 484 outputs = make_change(from_node, total_in, amount, fee) 485 outputs[to_node.getnewaddress()] = float(amount) 486 487 rawtx = from_node.createrawtransaction(inputs, outputs) 488 signresult = from_node.signrawtransaction(rawtx) 489 txid = from_node.sendrawtransaction(signresult["hex"], True) 490 491 return (txid, signresult["hex"], fee) 492 493def assert_fee_amount(fee, tx_size, fee_per_kB): 494 """Assert the fee was in range""" 495 target_fee = tx_size * fee_per_kB / 1000 496 if fee < target_fee: 497 raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)"%(str(fee), str(target_fee))) 498 # allow the wallet's estimation to be at most 2 bytes off 499 if fee > (tx_size + 2) * fee_per_kB / 1000: 500 raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)"%(str(fee), str(target_fee))) 501 502def assert_equal(thing1, thing2): 503 if thing1 != thing2: 504 raise AssertionError("%s != %s"%(str(thing1),str(thing2))) 505 506def assert_greater_than(thing1, thing2): 507 if thing1 <= thing2: 508 raise AssertionError("%s <= %s"%(str(thing1),str(thing2))) 509 510def assert_raises(exc, fun, *args, **kwds): 511 assert_raises_message(exc, None, fun, *args, **kwds) 512 513def assert_raises_message(exc, message, fun, *args, **kwds): 514 try: 515 fun(*args, **kwds) 516 except exc as e: 517 if message is not None and message not in e.error['message']: 518 raise AssertionError("Expected substring not found:"+e.error['message']) 519 except Exception as e: 520 raise AssertionError("Unexpected exception raised: "+type(e).__name__) 521 else: 522 raise AssertionError("No exception raised") 523 524def assert_is_hex_string(string): 525 try: 526 int(string, 16) 527 except Exception as e: 528 raise AssertionError( 529 "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e)) 530 531def assert_is_hash_string(string, length=64): 532 if not isinstance(string, str): 533 raise AssertionError("Expected a string, got type %r" % type(string)) 534 elif length and len(string) != length: 535 raise AssertionError( 536 "String of length %d expected; got %d" % (length, len(string))) 537 elif not re.match('[abcdef0-9]+$', string): 538 raise AssertionError( 539 "String %r contains invalid characters for a hash." % string) 540 541def assert_array_result(object_array, to_match, expected, should_not_find = False): 542 """ 543 Pass in array of JSON objects, a dictionary with key/value pairs 544 to match against, and another dictionary with expected key/value 545 pairs. 546 If the should_not_find flag is true, to_match should not be found 547 in object_array 548 """ 549 if should_not_find == True: 550 assert_equal(expected, { }) 551 num_matched = 0 552 for item in object_array: 553 all_match = True 554 for key,value in to_match.items(): 555 if item[key] != value: 556 all_match = False 557 if not all_match: 558 continue 559 elif should_not_find == True: 560 num_matched = num_matched+1 561 for key,value in expected.items(): 562 if item[key] != value: 563 raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value))) 564 num_matched = num_matched+1 565 if num_matched == 0 and should_not_find != True: 566 raise AssertionError("No objects matched %s"%(str(to_match))) 567 if num_matched > 0 and should_not_find == True: 568 raise AssertionError("Objects were found %s"%(str(to_match))) 569 570def satoshi_round(amount): 571 return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) 572 573# Helper to create at least "count" utxos 574# Pass in a fee that is sufficient for relay and mining new transactions. 575def create_confirmed_utxos(fee, node, count): 576 node.generate(int(0.5*count)+101) 577 utxos = node.listunspent() 578 iterations = count - len(utxos) 579 addr1 = node.getnewaddress() 580 addr2 = node.getnewaddress() 581 if iterations <= 0: 582 return utxos 583 for i in range(iterations): 584 t = utxos.pop() 585 inputs = [] 586 inputs.append({ "txid" : t["txid"], "vout" : t["vout"]}) 587 outputs = {} 588 send_value = t['amount'] - fee 589 outputs[addr1] = satoshi_round(send_value/2) 590 outputs[addr2] = satoshi_round(send_value/2) 591 raw_tx = node.createrawtransaction(inputs, outputs) 592 signed_tx = node.signrawtransaction(raw_tx)["hex"] 593 txid = node.sendrawtransaction(signed_tx) 594 595 while (node.getmempoolinfo()['size'] > 0): 596 node.generate(1) 597 598 utxos = node.listunspent() 599 assert(len(utxos) >= count) 600 return utxos 601 602# Create large OP_RETURN txouts that can be appended to a transaction 603# to make it large (helper for constructing large transactions). 604def gen_return_txouts(): 605 # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create 606 # So we have big transactions (and therefore can't fit very many into each block) 607 # create one script_pubkey 608 script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes 609 for i in range (512): 610 script_pubkey = script_pubkey + "01" 611 # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change 612 txouts = "81" 613 for k in range(128): 614 # add txout value 615 txouts = txouts + "0000000000000000" 616 # add length of script_pubkey 617 txouts = txouts + "fd0402" 618 # add script_pubkey 619 txouts = txouts + script_pubkey 620 return txouts 621 622def create_tx(node, coinbase, to_address, amount): 623 inputs = [{ "txid" : coinbase, "vout" : 0}] 624 outputs = { to_address : amount } 625 rawtx = node.createrawtransaction(inputs, outputs) 626 signresult = node.signrawtransaction(rawtx) 627 assert_equal(signresult["complete"], True) 628 return signresult["hex"] 629 630# Create a spend of each passed-in utxo, splicing in "txouts" to each raw 631# transaction to make it large. See gen_return_txouts() above. 632def create_lots_of_big_transactions(node, txouts, utxos, fee): 633 addr = node.getnewaddress() 634 txids = [] 635 for i in range(len(utxos)): 636 t = utxos.pop() 637 inputs = [] 638 inputs.append({ "txid" : t["txid"], "vout" : t["vout"]}) 639 outputs = {} 640 send_value = t['amount'] - fee 641 outputs[addr] = satoshi_round(send_value) 642 rawtx = node.createrawtransaction(inputs, outputs) 643 newtx = rawtx[0:92] 644 newtx = newtx + txouts 645 newtx = newtx + rawtx[94:] 646 signresult = node.signrawtransaction(newtx, None, None, "NONE") 647 txid = node.sendrawtransaction(signresult["hex"], True) 648 txids.append(txid) 649 return txids 650 651def get_bip9_status(node, key): 652 info = node.getblockchaininfo() 653 return info['bip9_softforks'][key] 654