#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

#
# Helpful routines for regression testing
#

import os
import sys

from binascii import hexlify, unhexlify
from base64 import b64encode
from decimal import Decimal, ROUND_DOWN
import json
import http.client
import random
import shutil
import subprocess
import time
import re
import errno

from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException

COVERAGE_DIR = None

# The maximum number of nodes a single test can spawn
MAX_NODES = 8
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000

class PortSeed:
    # Must be initialized with a unique integer for each process
    n = None

# Set Mocktime default to OFF.
# MOCKTIME is only needed for scripts that use the
# cached version of the blockchain. If the cached
# version of the blockchain is used without MOCKTIME
# then the mempools will not sync due to IBD.
MOCKTIME = 0

def enable_mocktime():
    # For backward compatibility of the python scripts
    # with previous versions of the cache, set MOCKTIME
    # to Jan 1, 2014 + (201 * 10 * 60)
    global MOCKTIME
    MOCKTIME = 1388534400 + (201 * 10 * 60)

def disable_mocktime():
    global MOCKTIME
    MOCKTIME = 0

def get_mocktime():
    return MOCKTIME

def enable_coverage(dirname):
    """Maintain a log of which RPC calls are made during testing."""
    global COVERAGE_DIR
    COVERAGE_DIR = dirname

def get_rpc_proxy(url, node_number, timeout=None):
    """
    Args:
        url (str): URL of the RPC server to call
        node_number (int): the node number (or id) that this call is to

    Kwargs:
        timeout (int): HTTP timeout in seconds

    Returns:
        AuthServiceProxy: convenience object for making RPC calls.
    """
    proxy_kwargs = {}
    if timeout is not None:
        proxy_kwargs['timeout'] = timeout

    proxy = AuthServiceProxy(url, **proxy_kwargs)
    proxy.url = url  # store URL on proxy for info

    coverage_logfile = coverage.get_filename(
        COVERAGE_DIR, node_number) if COVERAGE_DIR else None

    return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
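
# Each test process reserves its own block of ports, derived from PortSeed.n:
# p2p ports are assigned starting at PORT_MIN, rpc ports starting at
# PORT_MIN + PORT_RANGE.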
def p2p_port(n):
    assert(n <= MAX_NODES)
    return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)

def rpc_port(n):
    return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)

def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    n = Decimal("20000000.00000003")
    satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
    if satoshis != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")

def count_bytes(hex_string):
    return len(bytearray.fromhex(hex_string))

def bytes_to_hex_str(byte_str):
    return hexlify(byte_str).decode('ascii')

def hex_str_to_bytes(hex_str):
    return unhexlify(hex_str.encode('ascii'))

def str_to_b64str(string):
    return b64encode(string.encode('utf-8')).decode('ascii')

def sync_blocks(rpc_connections, wait=1):
    """
    Wait until everybody has the same block count
    """
    while True:
        counts = [ x.getblockcount() for x in rpc_connections ]
        if counts == [ counts[0] ]*len(counts):
            break
        time.sleep(wait)

def sync_mempools(rpc_connections, wait=1):
    """
    Wait until everybody has the same transactions in their memory
    pools
    """
    while True:
        pool = set(rpc_connections[0].getrawmempool())
        num_match = 1
        for i in range(1, len(rpc_connections)):
            if set(rpc_connections[i].getrawmempool()) == pool:
                num_match = num_match+1
        if num_match == len(rpc_connections):
            break
        time.sleep(wait)
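
# Map of node index -> subprocess.Popen handle for each bitcoind started by
# this module.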
bitcoind_processes = {}

def initialize_datadir(dirname, n):
    datadir = os.path.join(dirname, "node"+str(n))
    if not os.path.isdir(datadir):
        os.makedirs(datadir)
    rpc_u, rpc_p = rpc_auth_pair(n)
    # utf8 encoding so the non-ASCII rpc credentials below are written correctly
    with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
        f.write("regtest=1\n")
        f.write("rpcuser=" + rpc_u + "\n")
        f.write("rpcpassword=" + rpc_p + "\n")
        f.write("port="+str(p2p_port(n))+"\n")
        f.write("rpcport="+str(rpc_port(n))+"\n")
        f.write("listenonion=0\n")
    return datadir

def rpc_auth_pair(n):
    return 'rpcuser💻' + str(n), 'rpcpass🔑' + str(n)

def rpc_url(i, rpchost=None):
    rpc_u, rpc_p = rpc_auth_pair(i)
    return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, rpchost or '127.0.0.1', rpc_port(i))

def wait_for_bitcoind_start(process, url, i):
    '''
    Wait for bitcoind to start. This means that RPC is accessible and fully initialized.
    Raise an exception if bitcoind exits during initialization.
    '''
    while True:
        if process.poll() is not None:
            raise Exception('bitcoind exited with status %i during initialization' % process.returncode)
        try:
            rpc = get_rpc_proxy(url, i)
            blocks = rpc.getblockcount()
            break # break out of loop on success
        except IOError as e:
            if e.errno != errno.ECONNREFUSED: # Port not yet open?
                raise # unknown IO error
        except JSONRPCException as e: # Initialization phase
            if e.error['code'] != -28: # RPC in warmup?
                raise # unknown JSON RPC exception
        time.sleep(0.25)
def initialize_chain(test_dir):
    """
    Create (or copy from cache) a 200-block-long chain and
    4 wallets.
    """

    if (not os.path.isdir(os.path.join("cache","node0"))
        or not os.path.isdir(os.path.join("cache","node1"))
        or not os.path.isdir(os.path.join("cache","node2"))
        or not os.path.isdir(os.path.join("cache","node3"))):

        # find and delete old cache directories if any exist
        for i in range(4):
            if os.path.isdir(os.path.join("cache","node"+str(i))):
                shutil.rmtree(os.path.join("cache","node"+str(i)))

        # Create cache directories, run bitcoinds:
        for i in range(4):
            datadir = initialize_datadir("cache", i)
            args = [ os.getenv("BITCOIND", "bitcoind"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ]
            if i > 0:
                args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
            bitcoind_processes[i] = subprocess.Popen(args)
            if os.getenv("PYTHON_DEBUG", ""):
                print("initialize_chain: bitcoind started, waiting for RPC to come up")
            wait_for_bitcoind_start(bitcoind_processes[i], rpc_url(i), i)
            if os.getenv("PYTHON_DEBUG", ""):
                print("initialize_chain: RPC successfully started")

        rpcs = []
        for i in range(4):
            try:
                url = rpc_url(i)
                rpcs.append(get_rpc_proxy(url, i))
            except:
                sys.stderr.write("Error connecting to " + url + "\n")
                sys.exit(1)

        # Create a 200-block-long chain; each of the 4 nodes
        # gets 25 mature blocks and 25 immature.
        # Blocks are created with timestamps 10 minutes apart
        # starting from 2010 minutes in the past.
        enable_mocktime()
        block_time = get_mocktime() - (201 * 10 * 60)
        for i in range(2):
            for peer in range(4):
                for j in range(25):
                    set_node_times(rpcs, block_time)
                    rpcs[peer].generate(1)
                    block_time += 10*60
                # Must sync before next peer starts generating blocks
                sync_blocks(rpcs)

        # Shut them down, and clean up cache directories:
        stop_nodes(rpcs)
        wait_bitcoinds()
        disable_mocktime()
        for i in range(4):
            os.remove(log_filename("cache", i, "debug.log"))
            os.remove(log_filename("cache", i, "db.log"))
            os.remove(log_filename("cache", i, "peers.dat"))
            os.remove(log_filename("cache", i, "fee_estimates.dat"))

    for i in range(4):
        from_dir = os.path.join("cache", "node"+str(i))
        to_dir = os.path.join(test_dir, "node"+str(i))
        shutil.copytree(from_dir, to_dir)
        initialize_datadir(test_dir, i) # Overwrite port/rpcport in bitcoin.conf
def initialize_chain_clean(test_dir, num_nodes):
    """
    Create an empty blockchain and num_nodes wallets.
    Useful if a test case wants complete control over initialization.
    """
    for i in range(num_nodes):
        datadir = initialize_datadir(test_dir, i)

def _rpchost_to_args(rpchost):
    '''Convert optional IP:port spec to rpcconnect/rpcport args'''
    if rpchost is None:
        return []

    match = re.match(r'(\[[0-9a-fA-F:]+\]|[^:]+)(?::([0-9]+))?$', rpchost)
    if not match:
        raise ValueError('Invalid RPC host spec ' + rpchost)

    rpcconnect = match.group(1)
    rpcport = match.group(2)

    if rpcconnect.startswith('['): # remove IPv6 [...] wrapping
        rpcconnect = rpcconnect[1:-1]

    rv = ['-rpcconnect=' + rpcconnect]
    if rpcport:
        rv += ['-rpcport=' + rpcport]
    return rv

def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
    """
    Start a bitcoind and return RPC connection to it
    """
    datadir = os.path.join(dirname, "node"+str(i))
    if binary is None:
        binary = os.getenv("BITCOIND", "bitcoind")
    args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-mocktime="+str(get_mocktime()) ]
    if extra_args is not None: args.extend(extra_args)
    bitcoind_processes[i] = subprocess.Popen(args)
    if os.getenv("PYTHON_DEBUG", ""):
        print("start_node: bitcoind started, waiting for RPC to come up")
    url = rpc_url(i, rpchost)
    wait_for_bitcoind_start(bitcoind_processes[i], url, i)
    if os.getenv("PYTHON_DEBUG", ""):
        print("start_node: RPC successfully started")
    proxy = get_rpc_proxy(url, i, timeout=timewait)

    if COVERAGE_DIR:
        coverage.write_all_rpc_commands(COVERAGE_DIR, proxy)

    return proxy
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, binary=None):
    """
    Start multiple bitcoinds, return RPC connections to them
    """
    if extra_args is None: extra_args = [ None for _ in range(num_nodes) ]
    if binary is None: binary = [ None for _ in range(num_nodes) ]
    rpcs = []
    try:
        for i in range(num_nodes):
            rpcs.append(start_node(i, dirname, extra_args[i], rpchost, binary=binary[i]))
    except: # If one node failed to start, stop the others
        stop_nodes(rpcs)
        raise
    return rpcs

def log_filename(dirname, n_node, logname):
    return os.path.join(dirname, "node"+str(n_node), "regtest", logname)

def stop_node(node, i):
    try:
        node.stop()
    except http.client.CannotSendRequest as e:
        print("WARN: Unable to stop node: " + repr(e))
    bitcoind_processes[i].wait()
    del bitcoind_processes[i]

def stop_nodes(nodes):
    for node in nodes:
        try:
            node.stop()
        except http.client.CannotSendRequest as e:
            print("WARN: Unable to stop node: " + repr(e))
    del nodes[:] # Emptying array closes connections as a side effect

def set_node_times(nodes, t):
    for node in nodes:
        node.setmocktime(t)

def wait_bitcoinds():
    # Wait for all bitcoinds to cleanly exit
    for bitcoind in bitcoind_processes.values():
        bitcoind.wait()
    bitcoind_processes.clear()

def connect_nodes(from_connection, node_num):
    ip_port = "127.0.0.1:"+str(p2p_port(node_num))
    from_connection.addnode(ip_port, "onetry")
    # poll until version handshake complete to avoid race conditions
    # with transaction relaying
    while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
        time.sleep(0.1)

def connect_nodes_bi(nodes, a, b):
    connect_nodes(nodes[a], b)
    connect_nodes(nodes[b], a)
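
# Typical usage sketch for the helpers above (assumes the test framework has
# already set PortSeed.n and created the node datadirs in test_dir):
#
#   nodes = start_nodes(2, test_dir)
#   connect_nodes_bi(nodes, 0, 1)
#   nodes[0].generate(1)
#   sync_blocks(nodes)
#   stop_nodes(nodes)
#   wait_bitcoinds()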
def find_output(node, txid, amount):
    """
    Return index to output of txid with value amount
    Raises exception if there is none.
    """
    txdata = node.getrawtransaction(txid, 1)
    for i in range(len(txdata["vout"])):
        if txdata["vout"][i]["value"] == amount:
            return i
    raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))

def gather_inputs(from_node, amount_needed, confirmations_required=1):
    """
    Return a random set of unspent txouts that are enough to pay amount_needed
    """
    assert(confirmations_required >= 0)
    utxo = from_node.listunspent(confirmations_required)
    random.shuffle(utxo)
    inputs = []
    total_in = Decimal("0.00000000")
    while total_in < amount_needed and len(utxo) > 0:
        t = utxo.pop()
        total_in += t["amount"]
        inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] })
    if total_in < amount_needed:
        raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
    return (total_in, inputs)

def make_change(from_node, amount_in, amount_out, fee):
    """
    Create change output(s), return them
    """
    outputs = {}
    amount = amount_out+fee
    change = amount_in - amount
    if change > amount*2:
        # Create an extra change output to break up big inputs
        change_address = from_node.getnewaddress()
        # Split change in two, being careful of rounding:
        outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
        change = amount_in - amount - outputs[change_address]
    if change > 0:
        outputs[from_node.getnewaddress()] = change
    return outputs

def send_zeropri_transaction(from_node, to_node, amount, fee):
    """
    Create & broadcast a zero-priority transaction.
    Returns (txid, hex-encoded-txdata)
    Ensures transaction is zero-priority by first creating a send-to-self,
    then using its output
    """

    # Create a send-to-self with confirmed inputs:
    self_address = from_node.getnewaddress()
    (total_in, inputs) = gather_inputs(from_node, amount+fee*2)
    outputs = make_change(from_node, total_in, amount+fee, fee)
    outputs[self_address] = float(amount+fee)

    self_rawtx = from_node.createrawtransaction(inputs, outputs)
    self_signresult = from_node.signrawtransaction(self_rawtx)
    self_txid = from_node.sendrawtransaction(self_signresult["hex"], True)

    vout = find_output(from_node, self_txid, amount+fee)
    # Now immediately spend the output to create a 1-input, 1-output
    # zero-priority transaction:
    inputs = [ { "txid" : self_txid, "vout" : vout } ]
    outputs = { to_node.getnewaddress() : float(amount) }

    rawtx = from_node.createrawtransaction(inputs, outputs)
    signresult = from_node.signrawtransaction(rawtx)
    txid = from_node.sendrawtransaction(signresult["hex"], True)

    return (txid, signresult["hex"])

def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    """
    Create a random zero-priority transaction.
    Returns (txid, hex-encoded-transaction-data, fee)
    """
    from_node = random.choice(nodes)
    to_node = random.choice(nodes)
    fee = min_fee + fee_increment*random.randint(0,fee_variants)
    (txid, txhex) = send_zeropri_transaction(from_node, to_node, amount, fee)
    return (txid, txhex, fee)

def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    """
    Create a random transaction.
    Returns (txid, hex-encoded-transaction-data, fee)
    """
    from_node = random.choice(nodes)
    to_node = random.choice(nodes)
    fee = min_fee + fee_increment*random.randint(0,fee_variants)

    (total_in, inputs) = gather_inputs(from_node, amount+fee)
    outputs = make_change(from_node, total_in, amount, fee)
    outputs[to_node.getnewaddress()] = float(amount)

    rawtx = from_node.createrawtransaction(inputs, outputs)
    signresult = from_node.signrawtransaction(rawtx)
    txid = from_node.sendrawtransaction(signresult["hex"], True)

    return (txid, signresult["hex"], fee)

def assert_equal(thing1, thing2):
    if thing1 != thing2:
        raise AssertionError("%s != %s"%(str(thing1),str(thing2)))

def assert_greater_than(thing1, thing2):
    if thing1 <= thing2:
        raise AssertionError("%s <= %s"%(str(thing1),str(thing2)))

def assert_raises(exc, fun, *args, **kwds):
    try:
        fun(*args, **kwds)
    except exc:
        pass
    except Exception as e:
        raise AssertionError("Unexpected exception raised: "+type(e).__name__)
    else:
        raise AssertionError("No exception raised")

def assert_is_hex_string(string):
    try:
        int(string, 16)
    except Exception as e:
        raise AssertionError(
            "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))

def assert_is_hash_string(string, length=64):
    if not isinstance(string, str):
        raise AssertionError("Expected a string, got type %r" % type(string))
    elif length and len(string) != length:
        raise AssertionError(
            "String of length %d expected; got %d" % (length, len(string)))
    elif not re.match('[abcdef0-9]+$', string):
        raise AssertionError(
            "String %r contains invalid characters for a hash." % string)

def assert_array_result(object_array, to_match, expected, should_not_find = False):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    If the should_not_find flag is true, to_match should not be found
    in object_array
    """
    if should_not_find == True:
        assert_equal(expected, { })
    num_matched = 0
    for item in object_array:
        all_match = True
        for key,value in to_match.items():
            if item[key] != value:
                all_match = False
        if not all_match:
            continue
        elif should_not_find == True:
            num_matched = num_matched+1
        for key,value in expected.items():
            if item[key] != value:
                raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
            num_matched = num_matched+1
    if num_matched == 0 and should_not_find != True:
        raise AssertionError("No objects matched %s"%(str(to_match)))
    if num_matched > 0 and should_not_find == True:
        raise AssertionError("Objects were found %s"%(str(to_match)))
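
# Round a value down to eight decimal places (one satoshi).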
def satoshi_round(amount):
    return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)

# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
    node.generate(int(0.5*count)+101)
    utxos = node.listunspent()
    iterations = count - len(utxos)
    addr1 = node.getnewaddress()
    addr2 = node.getnewaddress()
    if iterations <= 0:
        return utxos
    for i in range(iterations):
        t = utxos.pop()
        inputs = []
        inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
        outputs = {}
        send_value = t['amount'] - fee
        outputs[addr1] = satoshi_round(send_value/2)
        outputs[addr2] = satoshi_round(send_value/2)
        raw_tx = node.createrawtransaction(inputs, outputs)
        signed_tx = node.signrawtransaction(raw_tx)["hex"]
        txid = node.sendrawtransaction(signed_tx)

    while (node.getmempoolinfo()['size'] > 0):
        node.generate(1)

    utxos = node.listunspent()
    assert(len(utxos) >= count)
    return utxos

# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
    # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
    # So we have big transactions (and therefore can't fit very many into each block)
    # create one script_pubkey
    script_pubkey = "6a4d0200" # OP_RETURN OP_PUSH2 512 bytes
    for i in range(512):
        script_pubkey = script_pubkey + "01"
    # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
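    # "81" is the new compact-size output count: 0x81 = 129 outputs, i.e. the
    # 128 OP_RETURN outputs below plus the original change output of the
    # transaction being spliced (see create_lots_of_big_transactions).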
    txouts = "81"
    for k in range(128):
        # add txout value
        txouts = txouts + "0000000000000000"
        # add length of script_pubkey
        txouts = txouts + "fd0402"
        # add script_pubkey
        txouts = txouts + script_pubkey
    return txouts
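
# Create and sign a 1-input, 1-output transaction spending output 0 of the
# given coinbase txid, sending "amount" to to_address.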
def create_tx(node, coinbase, to_address, amount):
    inputs = [{ "txid" : coinbase, "vout" : 0}]
    outputs = { to_address : amount }
    rawtx = node.createrawtransaction(inputs, outputs)
    signresult = node.signrawtransaction(rawtx)
    assert_equal(signresult["complete"], True)
    return signresult["hex"]

# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, fee):
    addr = node.getnewaddress()
    txids = []
    for i in range(len(utxos)):
        t = utxos.pop()
        inputs = []
        inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
        outputs = {}
        send_value = t['amount'] - fee
        outputs[addr] = satoshi_round(send_value)
        rawtx = node.createrawtransaction(inputs, outputs)
        newtx = rawtx[0:92]
        newtx = newtx + txouts
        newtx = newtx + rawtx[94:]
        signresult = node.signrawtransaction(newtx, None, None, "NONE")
        txid = node.sendrawtransaction(signresult["hex"], True)
        txids.append(txid)
    return txids
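
# Return the BIP9 softfork status object for the named deployment, as reported
# by getblockchaininfo.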
def get_bip9_status(node, key):
    info = node.getblockchaininfo()
    return info['bip9_softforks'][key]