You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

util.py 25KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670
  1. #!/usr/bin/env python3
  2. # Copyright (c) 2014-2016 The Bitcoin Core developers
  3. # Distributed under the MIT software license, see the accompanying
  4. # file COPYING or http://www.opensource.org/licenses/mit-license.php.
  5. """Helpful routines for regression testing."""
  6. import os
  7. import sys
  8. from binascii import hexlify, unhexlify
  9. from base64 import b64encode
  10. from decimal import Decimal, ROUND_DOWN
  11. import json
  12. import http.client
  13. import random
  14. import shutil
  15. import subprocess
  16. import tempfile
  17. import time
  18. import re
  19. import errno
  20. import logging
  21. from . import coverage
  22. from .authproxy import AuthServiceProxy, JSONRPCException
# Directory used for RPC-coverage logfiles; None disables coverage tracking
# (set via enable_coverage()).
COVERAGE_DIR = None

logger = logging.getLogger("TestFramework.utils")

# The maximum number of nodes a single test can spawn
MAX_NODES = 8
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
# Seconds to wait for a stopped bitcoind process to actually exit
BITCOIND_PROC_WAIT_TIMEOUT = 60

class PortSeed:
    # Must be initialized with a unique integer for each process
    n = None
  35. #Set Mocktime default to OFF.
  36. #MOCKTIME is only needed for scripts that use the
  37. #cached version of the blockchain. If the cached
  38. #version of the blockchain is used without MOCKTIME
  39. #then the mempools will not sync due to IBD.
  40. MOCKTIME = 0
  41. def enable_mocktime():
  42. #For backwared compatibility of the python scripts
  43. #with previous versions of the cache, set MOCKTIME
  44. #to Jan 1, 2014 + (201 * 10 * 60)
  45. global MOCKTIME
  46. MOCKTIME = 1388534400 + (201 * 10 * 60)
  47. def disable_mocktime():
  48. global MOCKTIME
  49. MOCKTIME = 0
  50. def get_mocktime():
  51. return MOCKTIME
  52. def enable_coverage(dirname):
  53. """Maintain a log of which RPC calls are made during testing."""
  54. global COVERAGE_DIR
  55. COVERAGE_DIR = dirname
def get_rpc_proxy(url, node_number, timeout=None):
    """
    Build a (coverage-aware) RPC proxy.

    Args:
        url (str): URL of the RPC server to call
        node_number (int): the node number (or id) that this calls to

    Kwargs:
        timeout (int): HTTP timeout in seconds

    Returns:
        AuthServiceProxy. convenience object for making RPC calls.
    """
    proxy_kwargs = {}
    if timeout is not None:
        proxy_kwargs['timeout'] = timeout

    proxy = AuthServiceProxy(url, **proxy_kwargs)
    proxy.url = url  # store URL on proxy for info

    # When coverage is enabled, wrap the proxy so every RPC command used gets
    # recorded into a per-node logfile.
    coverage_logfile = coverage.get_filename(
        COVERAGE_DIR, node_number) if COVERAGE_DIR else None

    return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
  74. def p2p_port(n):
  75. assert(n <= MAX_NODES)
  76. return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
  77. def rpc_port(n):
  78. return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
  79. def check_json_precision():
  80. """Make sure json library being used does not lose precision converting BTC values"""
  81. n = Decimal("20000000.00000003")
  82. satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
  83. if satoshis != 2000000000000003:
  84. raise RuntimeError("JSON encode/decode loses precision")
  85. def count_bytes(hex_string):
  86. return len(bytearray.fromhex(hex_string))
  87. def bytes_to_hex_str(byte_str):
  88. return hexlify(byte_str).decode('ascii')
  89. def hex_str_to_bytes(hex_str):
  90. return unhexlify(hex_str.encode('ascii'))
  91. def str_to_b64str(string):
  92. return b64encode(string.encode('utf-8')).decode('ascii')
  93. def sync_blocks(rpc_connections, *, wait=1, timeout=60):
  94. """
  95. Wait until everybody has the same tip.
  96. sync_blocks needs to be called with an rpc_connections set that has least
  97. one node already synced to the latest, stable tip, otherwise there's a
  98. chance it might return before all nodes are stably synced.
  99. """
  100. # Use getblockcount() instead of waitforblockheight() to determine the
  101. # initial max height because the two RPCs look at different internal global
  102. # variables (chainActive vs latestBlock) and the former gets updated
  103. # earlier.
  104. maxheight = max(x.getblockcount() for x in rpc_connections)
  105. start_time = cur_time = time.time()
  106. while cur_time <= start_time + timeout:
  107. tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
  108. if all(t["height"] == maxheight for t in tips):
  109. if all(t["hash"] == tips[0]["hash"] for t in tips):
  110. return
  111. raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
  112. "".join("\n {!r}".format(tip) for tip in tips)))
  113. cur_time = time.time()
  114. raise AssertionError("Block sync to height {} timed out:{}".format(
  115. maxheight, "".join("\n {!r}".format(tip) for tip in tips)))
  116. def sync_chain(rpc_connections, *, wait=1, timeout=60):
  117. """
  118. Wait until everybody has the same best block
  119. """
  120. while timeout > 0:
  121. best_hash = [x.getbestblockhash() for x in rpc_connections]
  122. if best_hash == [best_hash[0]]*len(best_hash):
  123. return
  124. time.sleep(wait)
  125. timeout -= wait
  126. raise AssertionError("Chain sync failed: Best block hashes don't match")
  127. def sync_mempools(rpc_connections, *, wait=1, timeout=60):
  128. """
  129. Wait until everybody has the same transactions in their memory
  130. pools
  131. """
  132. while timeout > 0:
  133. pool = set(rpc_connections[0].getrawmempool())
  134. num_match = 1
  135. for i in range(1, len(rpc_connections)):
  136. if set(rpc_connections[i].getrawmempool()) == pool:
  137. num_match = num_match+1
  138. if num_match == len(rpc_connections):
  139. return
  140. time.sleep(wait)
  141. timeout -= wait
  142. raise AssertionError("Mempool sync failed")
# Map of node number -> running bitcoind subprocess.Popen handle.
bitcoind_processes = {}

def initialize_datadir(dirname, n):
    """Create (if needed) node n's datadir and write its bitcoin.conf.

    The config enables regtest and sets the node's RPC credentials and
    deterministic p2p/rpc ports.  Returns the datadir path.
    """
    datadir = os.path.join(dirname, "node"+str(n))
    if not os.path.isdir(datadir):
        os.makedirs(datadir)
    rpc_u, rpc_p = rpc_auth_pair(n)
    # utf8 encoding because the credentials contain non-ASCII characters.
    with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
        f.write("regtest=1\n")
        f.write("rpcuser=" + rpc_u + "\n")
        f.write("rpcpassword=" + rpc_p + "\n")
        f.write("port="+str(p2p_port(n))+"\n")
        f.write("rpcport="+str(rpc_port(n))+"\n")
        f.write("listenonion=0\n")
    return datadir
  157. def rpc_auth_pair(n):
  158. return 'rpcuser💻' + str(n), 'rpcpass🔑' + str(n)
  159. def rpc_url(i, rpchost=None):
  160. rpc_u, rpc_p = rpc_auth_pair(i)
  161. host = '127.0.0.1'
  162. port = rpc_port(i)
  163. if rpchost:
  164. parts = rpchost.split(':')
  165. if len(parts) == 2:
  166. host, port = parts
  167. else:
  168. host = rpchost
  169. return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
  170. def wait_for_bitcoind_start(process, url, i):
  171. '''
  172. Wait for bitcoind to start. This means that RPC is accessible and fully initialized.
  173. Raise an exception if bitcoind exits during initialization.
  174. '''
  175. while True:
  176. if process.poll() is not None:
  177. raise Exception('bitcoind exited with status %i during initialization' % process.returncode)
  178. try:
  179. rpc = get_rpc_proxy(url, i)
  180. blocks = rpc.getblockcount()
  181. break # break out of loop on success
  182. except IOError as e:
  183. if e.errno != errno.ECONNREFUSED: # Port not yet open?
  184. raise # unknown IO error
  185. except JSONRPCException as e: # Initialization phase
  186. if e.error['code'] != -28: # RPC in warmup?
  187. raise # unknown JSON RPC exception
  188. time.sleep(0.25)
  189. def initialize_chain(test_dir, num_nodes, cachedir):
  190. """
  191. Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
  192. Afterward, create num_nodes copies from the cache
  193. """
  194. assert num_nodes <= MAX_NODES
  195. create_cache = False
  196. for i in range(MAX_NODES):
  197. if not os.path.isdir(os.path.join(cachedir, 'node'+str(i))):
  198. create_cache = True
  199. break
  200. if create_cache:
  201. logger.debug("Creating data directories from cached datadir")
  202. #find and delete old cache directories if any exist
  203. for i in range(MAX_NODES):
  204. if os.path.isdir(os.path.join(cachedir,"node"+str(i))):
  205. shutil.rmtree(os.path.join(cachedir,"node"+str(i)))
  206. # Create cache directories, run bitcoinds:
  207. for i in range(MAX_NODES):
  208. datadir=initialize_datadir(cachedir, i)
  209. args = [ os.getenv("BITCOIND", "bitcoind"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ]
  210. if i > 0:
  211. args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
  212. bitcoind_processes[i] = subprocess.Popen(args)
  213. logger.debug("initialize_chain: bitcoind started, waiting for RPC to come up")
  214. wait_for_bitcoind_start(bitcoind_processes[i], rpc_url(i), i)
  215. logger.debug("initialize_chain: RPC successfully started")
  216. rpcs = []
  217. for i in range(MAX_NODES):
  218. try:
  219. rpcs.append(get_rpc_proxy(rpc_url(i), i))
  220. except:
  221. sys.stderr.write("Error connecting to "+url+"\n")
  222. sys.exit(1)
  223. # Create a 200-block-long chain; each of the 4 first nodes
  224. # gets 25 mature blocks and 25 immature.
  225. # Note: To preserve compatibility with older versions of
  226. # initialize_chain, only 4 nodes will generate coins.
  227. #
  228. # blocks are created with timestamps 10 minutes apart
  229. # starting from 2010 minutes in the past
  230. enable_mocktime()
  231. block_time = get_mocktime() - (201 * 10 * 60)
  232. for i in range(2):
  233. for peer in range(4):
  234. for j in range(25):
  235. set_node_times(rpcs, block_time)
  236. rpcs[peer].generate(1)
  237. block_time += 10*60
  238. # Must sync before next peer starts generating blocks
  239. sync_blocks(rpcs)
  240. # Shut them down, and clean up cache directories:
  241. stop_nodes(rpcs)
  242. disable_mocktime()
  243. for i in range(MAX_NODES):
  244. os.remove(log_filename(cachedir, i, "debug.log"))
  245. os.remove(log_filename(cachedir, i, "db.log"))
  246. os.remove(log_filename(cachedir, i, "peers.dat"))
  247. os.remove(log_filename(cachedir, i, "fee_estimates.dat"))
  248. for i in range(num_nodes):
  249. from_dir = os.path.join(cachedir, "node"+str(i))
  250. to_dir = os.path.join(test_dir, "node"+str(i))
  251. shutil.copytree(from_dir, to_dir)
  252. initialize_datadir(test_dir, i) # Overwrite port/rpcport in bitcoin.conf
  253. def initialize_chain_clean(test_dir, num_nodes):
  254. """
  255. Create an empty blockchain and num_nodes wallets.
  256. Useful if a test case wants complete control over initialization.
  257. """
  258. for i in range(num_nodes):
  259. datadir=initialize_datadir(test_dir, i)
def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, stderr=None):
    """
    Start a bitcoind and return RPC connection to it

    i: node number (selects datadir and ports)
    dirname: test directory containing the per-node datadirs
    extra_args: extra command-line arguments for bitcoind
    rpchost: optional host[:port] override for the RPC connection
    timewait: HTTP timeout (seconds) for the returned proxy
    binary: bitcoind binary path (defaults to $BITCOIND or "bitcoind")
    stderr: file object to capture the process's stderr
    """
    datadir = os.path.join(dirname, "node"+str(i))
    if binary is None:
        binary = os.getenv("BITCOIND", "bitcoind")
    args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-mocktime="+str(get_mocktime()) ]
    if extra_args is not None: args.extend(extra_args)
    bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr)
    logger.debug("initialize_chain: bitcoind started, waiting for RPC to come up")
    url = rpc_url(i, rpchost)
    # Blocks until RPC answers; raises if the process dies first.
    wait_for_bitcoind_start(bitcoind_processes[i], url, i)
    logger.debug("initialize_chain: RPC successfully started")
    proxy = get_rpc_proxy(url, i, timeout=timewait)

    if COVERAGE_DIR:
        coverage.write_all_rpc_commands(COVERAGE_DIR, proxy)

    return proxy
def assert_start_raises_init_error(i, dirname, extra_args=None, expected_msg=None):
    """Start a node that is expected to fail during initialization.

    If expected_msg is given, the node's stderr must contain that substring.
    Raises AssertionError if the node instead starts successfully.
    """
    with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr:
        try:
            node = start_node(i, dirname, extra_args, stderr=log_stderr)
            stop_node(node, i)
        except Exception as e:
            assert 'bitcoind exited' in str(e)  # node must have shutdown
            if expected_msg is not None:
                log_stderr.seek(0)
                stderr = log_stderr.read().decode('utf-8')
                if expected_msg not in stderr:
                    raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr)
        else:
            # No exception: the node started, which is itself the failure.
            if expected_msg is None:
                assert_msg = "bitcoind should have exited with an error"
            else:
                assert_msg = "bitcoind should have exited with expected error " + expected_msg
            raise AssertionError(assert_msg)
  296. def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
  297. """
  298. Start multiple bitcoinds, return RPC connections to them
  299. """
  300. if extra_args is None: extra_args = [ None for _ in range(num_nodes) ]
  301. if binary is None: binary = [ None for _ in range(num_nodes) ]
  302. rpcs = []
  303. try:
  304. for i in range(num_nodes):
  305. rpcs.append(start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i]))
  306. except: # If one node failed to start, stop the others
  307. stop_nodes(rpcs)
  308. raise
  309. return rpcs
  310. def log_filename(dirname, n_node, logname):
  311. return os.path.join(dirname, "node"+str(n_node), "regtest", logname)
def stop_node(node, i):
    """Stop node i via RPC and wait for its process to exit cleanly."""
    logger.debug("Stopping node %d" % i)
    try:
        node.stop()
    except http.client.CannotSendRequest as e:
        # The RPC connection may already be unusable; the process wait below
        # still verifies shutdown.
        logger.exception("Unable to stop node")
    # A non-zero exit (or a timeout) fails the test here.
    return_code = bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT)
    assert_equal(return_code, 0)
    del bitcoind_processes[i]
  321. def stop_nodes(nodes):
  322. for i, node in enumerate(nodes):
  323. stop_node(node, i)
  324. assert not bitcoind_processes.values() # All connections must be gone now
  325. def set_node_times(nodes, t):
  326. for node in nodes:
  327. node.setmocktime(t)
  328. def connect_nodes(from_connection, node_num):
  329. ip_port = "127.0.0.1:"+str(p2p_port(node_num))
  330. from_connection.addnode(ip_port, "onetry")
  331. # poll until version handshake complete to avoid race conditions
  332. # with transaction relaying
  333. while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
  334. time.sleep(0.1)
  335. def connect_nodes_bi(nodes, a, b):
  336. connect_nodes(nodes[a], b)
  337. connect_nodes(nodes[b], a)
  338. def find_output(node, txid, amount):
  339. """
  340. Return index to output of txid with value amount
  341. Raises exception if there is none.
  342. """
  343. txdata = node.getrawtransaction(txid, 1)
  344. for i in range(len(txdata["vout"])):
  345. if txdata["vout"][i]["value"] == amount:
  346. return i
  347. raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))
  348. def gather_inputs(from_node, amount_needed, confirmations_required=1):
  349. """
  350. Return a random set of unspent txouts that are enough to pay amount_needed
  351. """
  352. assert(confirmations_required >=0)
  353. utxo = from_node.listunspent(confirmations_required)
  354. random.shuffle(utxo)
  355. inputs = []
  356. total_in = Decimal("0.00000000")
  357. while total_in < amount_needed and len(utxo) > 0:
  358. t = utxo.pop()
  359. total_in += t["amount"]
  360. inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } )
  361. if total_in < amount_needed:
  362. raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
  363. return (total_in, inputs)
  364. def make_change(from_node, amount_in, amount_out, fee):
  365. """
  366. Create change output(s), return them
  367. """
  368. outputs = {}
  369. amount = amount_out+fee
  370. change = amount_in - amount
  371. if change > amount*2:
  372. # Create an extra change output to break up big inputs
  373. change_address = from_node.getnewaddress()
  374. # Split change in two, being careful of rounding:
  375. outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
  376. change = amount_in - amount - outputs[change_address]
  377. if change > 0:
  378. outputs[from_node.getnewaddress()] = change
  379. return outputs
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    """
    Create a random transaction.
    Returns (txid, hex-encoded-transaction-data, fee)

    Sender and receiver are chosen at random from `nodes`; the fee is
    min_fee plus a random multiple (0..fee_variants) of fee_increment.
    """
    from_node = random.choice(nodes)
    to_node = random.choice(nodes)
    fee = min_fee + fee_increment*random.randint(0,fee_variants)

    (total_in, inputs) = gather_inputs(from_node, amount+fee)
    outputs = make_change(from_node, total_in, amount, fee)
    outputs[to_node.getnewaddress()] = float(amount)

    rawtx = from_node.createrawtransaction(inputs, outputs)
    signresult = from_node.signrawtransaction(rawtx)
    txid = from_node.sendrawtransaction(signresult["hex"], True)

    return (txid, signresult["hex"], fee)
  395. def assert_fee_amount(fee, tx_size, fee_per_kB):
  396. """Assert the fee was in range"""
  397. target_fee = tx_size * fee_per_kB / 1000
  398. if fee < target_fee:
  399. raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)"%(str(fee), str(target_fee)))
  400. # allow the wallet's estimation to be at most 2 bytes off
  401. if fee > (tx_size + 2) * fee_per_kB / 1000:
  402. raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)"%(str(fee), str(target_fee)))
  403. def assert_equal(thing1, thing2, *args):
  404. if thing1 != thing2 or any(thing1 != arg for arg in args):
  405. raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))
  406. def assert_greater_than(thing1, thing2):
  407. if thing1 <= thing2:
  408. raise AssertionError("%s <= %s"%(str(thing1),str(thing2)))
  409. def assert_greater_than_or_equal(thing1, thing2):
  410. if thing1 < thing2:
  411. raise AssertionError("%s < %s"%(str(thing1),str(thing2)))
  412. def assert_raises(exc, fun, *args, **kwds):
  413. assert_raises_message(exc, None, fun, *args, **kwds)
  414. def assert_raises_message(exc, message, fun, *args, **kwds):
  415. try:
  416. fun(*args, **kwds)
  417. except exc as e:
  418. if message is not None and message not in e.error['message']:
  419. raise AssertionError("Expected substring not found:"+e.error['message'])
  420. except Exception as e:
  421. raise AssertionError("Unexpected exception raised: "+type(e).__name__)
  422. else:
  423. raise AssertionError("No exception raised")
  424. def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
  425. """Run an RPC and verify that a specific JSONRPC exception code and message is raised.
  426. Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
  427. and verifies that the error code and message are as expected. Throws AssertionError if
  428. no JSONRPCException was returned or if the error code/message are not as expected.
  429. Args:
  430. code (int), optional: the error code returned by the RPC call (defined
  431. in src/rpc/protocol.h). Set to None if checking the error code is not required.
  432. message (string), optional: [a substring of] the error string returned by the
  433. RPC call. Set to None if checking the error string is not required
  434. fun (function): the function to call. This should be the name of an RPC.
  435. args*: positional arguments for the function.
  436. kwds**: named arguments for the function.
  437. """
  438. try:
  439. fun(*args, **kwds)
  440. except JSONRPCException as e:
  441. # JSONRPCException was thrown as expected. Check the code and message values are correct.
  442. if (code is not None) and (code != e.error["code"]):
  443. raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
  444. if (message is not None) and (message not in e.error['message']):
  445. raise AssertionError("Expected substring not found:"+e.error['message'])
  446. except Exception as e:
  447. raise AssertionError("Unexpected exception raised: "+type(e).__name__)
  448. else:
  449. raise AssertionError("No exception raised")
  450. def assert_is_hex_string(string):
  451. try:
  452. int(string, 16)
  453. except Exception as e:
  454. raise AssertionError(
  455. "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
  456. def assert_is_hash_string(string, length=64):
  457. if not isinstance(string, str):
  458. raise AssertionError("Expected a string, got type %r" % type(string))
  459. elif length and len(string) != length:
  460. raise AssertionError(
  461. "String of length %d expected; got %d" % (length, len(string)))
  462. elif not re.match('[abcdef0-9]+$', string):
  463. raise AssertionError(
  464. "String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find = False):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    If the should_not_find flag is true, to_match should not be found
    in object_array
    """
    if should_not_find == True:
        # With should_not_find, no expected values make sense.
        assert_equal(expected, { })
    num_matched = 0
    for item in object_array:
        all_match = True
        for key,value in to_match.items():
            if item[key] != value:
                all_match = False
        if not all_match:
            continue
        elif should_not_find == True:
            # Count the (unwanted) match; the expected loop below is empty.
            num_matched = num_matched+1
        for key,value in expected.items():
            if item[key] != value:
                raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
            # One increment per expected key that matched.
            num_matched = num_matched+1
    if num_matched == 0 and should_not_find != True:
        raise AssertionError("No objects matched %s"%(str(to_match)))
    if num_matched > 0 and should_not_find == True:
        raise AssertionError("Objects were found %s"%(str(to_match)))
  493. def satoshi_round(amount):
  494. return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
    """Split the node's utxos until it owns at least `count` confirmed utxos; return them."""
    # Mine enough blocks that the wallet has spendable (mature) coinbases.
    node.generate(int(0.5*count)+101)
    utxos = node.listunspent()
    iterations = count - len(utxos)
    addr1 = node.getnewaddress()
    addr2 = node.getnewaddress()
    if iterations <= 0:
        return utxos
    for i in range(iterations):
        # Each split spends one utxo into two halves, netting one extra utxo.
        t = utxos.pop()
        inputs = []
        inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
        outputs = {}
        send_value = t['amount'] - fee
        outputs[addr1] = satoshi_round(send_value/2)
        outputs[addr2] = satoshi_round(send_value/2)
        raw_tx = node.createrawtransaction(inputs, outputs)
        signed_tx = node.signrawtransaction(raw_tx)["hex"]
        txid = node.sendrawtransaction(signed_tx)

    # Confirm everything, then re-read the wallet's utxo set.
    while (node.getmempoolinfo()['size'] > 0):
        node.generate(1)
    utxos = node.listunspent()
    assert(len(utxos) >= count)
    return utxos
  521. # Create large OP_RETURN txouts that can be appended to a transaction
  522. # to make it large (helper for constructing large transactions).
  523. def gen_return_txouts():
  524. # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
  525. # So we have big transactions (and therefore can't fit very many into each block)
  526. # create one script_pubkey
  527. script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes
  528. for i in range (512):
  529. script_pubkey = script_pubkey + "01"
  530. # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
  531. txouts = "81"
  532. for k in range(128):
  533. # add txout value
  534. txouts = txouts + "0000000000000000"
  535. # add length of script_pubkey
  536. txouts = txouts + "fd0402"
  537. # add script_pubkey
  538. txouts = txouts + script_pubkey
  539. return txouts
  540. def create_tx(node, coinbase, to_address, amount):
  541. inputs = [{ "txid" : coinbase, "vout" : 0}]
  542. outputs = { to_address : amount }
  543. rawtx = node.createrawtransaction(inputs, outputs)
  544. signresult = node.signrawtransaction(rawtx)
  545. assert_equal(signresult["complete"], True)
  546. return signresult["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
    """Spend `num` utxos, padding each transaction with `txouts`; return the txids."""
    addr = node.getnewaddress()
    txids = []
    for _ in range(num):
        t = utxos.pop()
        inputs=[{ "txid" : t["txid"], "vout" : t["vout"]}]
        outputs = {}
        change = t['amount'] - fee
        outputs[addr] = satoshi_round(change)
        rawtx = node.createrawtransaction(inputs, outputs)
        # Splice the padding txouts into the serialized tx.  NOTE(review):
        # offsets assume a single-input unsigned tx — hex chars [0:92] run up
        # to the output-count byte at [92:94], which txouts (starting with its
        # own count byte) replaces; confirm if the input layout changes.
        newtx = rawtx[0:92]
        newtx = newtx + txouts
        newtx = newtx + rawtx[94:]
        # Sign with "NONE" so the signature stays valid despite the new outputs.
        signresult = node.signrawtransaction(newtx, None, None, "NONE")
        txid = node.sendrawtransaction(signresult["hex"], True)
        txids.append(txid)
    return txids
  566. def mine_large_block(node, utxos=None):
  567. # generate a 66k transaction,
  568. # and 14 of them is close to the 1MB block limit
  569. num = 14
  570. txouts = gen_return_txouts()
  571. utxos = utxos if utxos is not None else []
  572. if len(utxos) < num:
  573. utxos.clear()
  574. utxos.extend(node.listunspent())
  575. fee = 100 * node.getnetworkinfo()["relayfee"]
  576. create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
  577. node.generate(1)
  578. def get_bip9_status(node, key):
  579. info = node.getblockchaininfo()
  580. return info['bip9_softforks'][key]