Advertisement
Guest User

Untitled

a guest
Nov 13th, 2011
570
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 73.06 KB | None | 0 0
  1. # Copyright(C) 2011 by John Tobey <John.Tobey@gmail.com>
  2.  
  3. # This program is free software: you can redistribute it and/or modify
  4. # it under the terms of the GNU Affero General Public License as
  5. # published by the Free Software Foundation, either version 3 of the
  6. # License, or (at your option) any later version.
  7. #
  8. # This program is distributed in the hope that it will be useful, but
  9. # WITHOUT ANY WARRANTY; without even the implied warranty of
  10. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  11. # Affero General Public License for more details.
  12. #
  13. # You should have received a copy of the GNU Affero General Public
  14. # License along with this program. If not, see
  15. # <http://www.gnu.org/licenses/agpl.html>.
  16.  
  17. import os
  18. import re
  19. import binascii
  20.  
  21. # bitcointools -- modified deserialize.py to return raw transaction
  22. import BCDataStream
  23. import deserialize
  24. import util
  25. import warnings
  26.  
  27. SCHEMA_VERSION = "Abe24"
  28.  
  29. WORK_BITS = 304 # XXX more than necessary.
  30.  
  31. BITCOIN_MAGIC = "\xf9\xbe\xb4\xd9"
  32. BITCOIN_MAGIC_ID = 1
  33. BITCOIN_POLICY_ID = 1
  34. BITCOIN_CHAIN_ID = 1
  35. BITCOIN_ADDRESS_VERSION = "\0"
  36.  
  37. TESTNET_MAGIC = "\xfa\xbf\xb5\xda"
  38. TESTNET_MAGIC_ID = 2
  39. TESTNET_POLICY_ID = 2
  40. TESTNET_CHAIN_ID = 2
  41. TESTNET_ADDRESS_VERSION = "\x6f"
  42.  
  43. NAMECOIN_MAGIC = "\xf9\xbe\xb4\xfe"
  44. NAMECOIN_MAGIC_ID = 3
  45. NAMECOIN_POLICY_ID = 3
  46. NAMECOIN_CHAIN_ID = 3
  47. NAMECOIN_ADDRESS_VERSION = "\x34"
  48.  
  49. WEEDS_MAGIC = "\xf8\xbf\xb5\xda"
  50. WEEDS_MAGIC_ID = 4
  51. WEEDS_POLICY_ID = 4
  52. WEEDS_CHAIN_ID = 4
  53. WEEDS_ADDRESS_VERSION = "\xf3"
  54.  
  55. BEER_MAGIC = "\xf7\xbf\xb5\xdb"
  56. BEER_MAGIC_ID = 5
  57. BEER_POLICY_ID = 5
  58. BEER_CHAIN_ID = 5
  59. BEER_ADDRESS_VERSION = "\xf2"
  60.  
  61. SLC_MAGIC = "\xde\xad\xba\xbe"
  62. SLC_MAGIC_ID = 6
  63. SLC_POLICY_ID = 6
  64. SLC_CHAIN_ID = 6
  65. SLC_ADDRESS_VERSION = "\0"
  66.  
  67. #LC_MAGIC = "\xfc\xc1\xb7\xdc"
  68. LC_MAGIC = "\xfb\xc0\xb6\xdb"
  69. LC_MAGIC_ID = 7
  70. LC_POLICY_ID = 7
  71. LC_CHAIN_ID = 7
  72. LC_ADDRESS_VERSION = "\x30" '\u0030'
  73.  
  74. GENESIS_HASH_PREV = "\0" * 32
  75.  
  76. SCRIPT_ADDRESS_RE = re.compile("\x76\xa9\x14(.{20})\x88\xac", re.DOTALL)
  77. SCRIPT_PUBKEY_RE = re.compile("\x41(.{65})\xac", re.DOTALL)
  78.  
  79. NO_CLOB = 'BUG_NO_CLOB'
  80.  
class DataStore(object):

    """
    Bitcoin data storage class based on DB-API 2 and SQL1992 with
    workarounds to support SQLite3 and PostgreSQL/psycopg2.
    """

    def __init__(store, args):
        """
        Opens and stores a connection to the SQL database.

        args.dbtype should name a DB-API 2 driver module, e.g.,
        "sqlite3".

        args.connect_args should be an argument to the module's
        connect() method, or None for no argument, or a list of
        arguments, or a dictionary of named arguments.

        args.datadir names Bitcoin data directories containing
        blk0001.dat to scan for new blocks.
        """
        store.args = args
        store.log_sql = args.log_sql  # when true, echo every statement
        # The DB-API 2 driver module named by configuration.
        store.module = __import__(args.dbtype)
        store.connect()
        store._ddl = store._get_ddl()

        # Read the CONFIG and CONFIGVAR tables if present.
        store.config = store._read_config()

        if store.config is None:
            # No schema found: create it from scratch.
            store.initialize()
        elif store.config['schema_version'] == SCHEMA_VERSION:
            pass
        elif args.upgrade:
            # Older schema and the user asked for an in-place upgrade.
            store._set_sql_flavour()
            import upgrade
            upgrade.upgrade_schema(store)
        else:
            raise Exception(
                "Database schema version (%s) does not match software"
                " (%s). Please run with --upgrade to convert database."
                % (store.config['schema_version'], SCHEMA_VERSION))

        store._set_sql_flavour()
        store._blocks = {}
        store._init_datadirs()

        # Commit roughly every commit_bytes of block data loaded
        # (default 100000 when unconfigured).
        store.commit_bytes = args.commit_bytes
        if store.commit_bytes is None:
            store.commit_bytes = 100000
        else:
            store.commit_bytes = int(store.commit_bytes)
  134.  
  135.  
    def connect(store):
        """Open a DB-API 2 connection per args.connect_args and store
        both the connection and a cursor on the instance."""
        cargs = store.args.connect_args

        if cargs is None:
            conn = store.module.connect()
        else:
            if isinstance(cargs, dict):
                if "" in cargs:
                    # The "" key carries positional argument(s); the
                    # remaining keys are keyword arguments.
                    cargs = cargs.copy()
                    nkwargs = cargs[""]
                    del(cargs[""])
                    if isinstance(nkwargs, list):
                        conn = store.module.connect(*nkwargs, **cargs)
                    else:
                        conn = store.module.connect(nkwargs, **cargs)
                else:
                    conn = store.module.connect(**cargs)
            elif isinstance(cargs, list):
                conn = store.module.connect(*cargs)
            else:
                # A single scalar argument, e.g. an sqlite3 file name.
                conn = store.module.connect(cargs)

        store.conn = conn
        store.cursor = conn.cursor()
  160.  
    def reconnect(store):
        """Drop the current cursor and connection, ignoring errors,
        then open a fresh connection."""
        print "Reconnecting to database."
        try:
            store.cursor.close()
        except:
            # Best effort: the old cursor may already be unusable.
            pass
        try:
            store.conn.close()
        except:
            pass
        store.connect()
  172.  
    def _read_config(store):
        """Return the stored schema configuration as a dict, or None
        when no recognizable schema exists in the database."""
        # Read table CONFIGVAR if it exists.
        config = {}
        try:
            store.cursor.execute("""
                SELECT configvar_name, configvar_value
                FROM configvar""")
            for row in store.cursor.fetchall():
                name, value = row
                config[name] = value
            if config:
                return config

        except store.module.DatabaseError:
            # Table missing (or unreadable); fall through to the
            # legacy layout after clearing the failed transaction.
            try:
                store.rollback()
            except:
                pass

        # Read legacy table CONFIG if it exists.
        try:
            store.cursor.execute("""
                SELECT schema_version, binary_type
                FROM config
                WHERE config_id = 1""")
            row = store.cursor.fetchone()
            sv, btype = row
            return { 'schema_version': sv, 'binary_type': btype }
        except:
            try:
                store.rollback()
            except:
                pass

        # Return None to indicate no schema found.
        return None
  209.  
  210. # Accommodate SQL quirks.
  211. def _set_sql_flavour(store):
  212. def identity(x):
  213. return x
  214. transform = identity
  215.  
  216. if store.module.paramstyle in ('format', 'pyformat'):
  217. transform = store._qmark_to_format(transform)
  218. elif store.module.paramstyle == 'named':
  219. transform = store._named_to_format(transform)
  220. elif store.module.paramstyle != 'qmark':
  221. warnings.warn("Database parameter style is " +
  222. "%s, trying qmark" % module.paramstyle)
  223. pass
  224.  
  225. # Binary I/O with the database.
  226. # Hashes are a special type; since the protocol treats them as
  227. # 256-bit integers and represents them as little endian, we
  228. # have to reverse them in hex to satisfy human expectations.
  229. def rev(x):
  230. return x[::-1]
  231. def to_hex(x):
  232. return None if x is None else binascii.hexlify(x)
  233. def from_hex(x):
  234. return None if x is None else binascii.unhexlify(x)
  235. def to_hex_rev(x):
  236. return None if x is None else binascii.hexlify(x[::-1])
  237. def from_hex_rev(x):
  238. return None if x is None else binascii.unhexlify(x)[::-1]
  239.  
  240. btype = store.config.get('binary_type')
  241.  
  242. if btype in (None, 'str'):
  243. binin = identity
  244. binin_hex = from_hex
  245. binout = identity
  246. binout_hex = to_hex
  247. hashin = rev
  248. hashin_hex = from_hex
  249. hashout = rev
  250. hashout_hex = to_hex
  251.  
  252. elif btype in ("buffer", "bytearray"):
  253. if btype == "buffer":
  254. def to_btype(x):
  255. return None if x is None else buffer(x)
  256. else:
  257. def to_btype(x):
  258. return None if x is None else bytearray(x)
  259.  
  260. binin = to_btype
  261. binin_hex = lambda x: to_btype(from_hex(x))
  262. binout = str
  263. binout_hex = to_hex
  264. hashin = lambda x: to_btype(rev(x))
  265. hashin_hex = lambda x: to_btype(from_hex(x))
  266. hashout = rev
  267. hashout_hex = to_hex
  268.  
  269. elif btype == "hex":
  270. transform = store._sql_binary_as_hex(transform)
  271. binin = to_hex
  272. binin_hex = identity
  273. binout = from_hex
  274. binout_hex = identity
  275. hashin = to_hex_rev
  276. hashin_hex = identity
  277. hashout = from_hex_rev
  278. hashout_hex = identity
  279.  
  280. else:
  281. raise Exception("Unsupported binary-type %s" % btype)
  282.  
  283. itype = store.config.get('int_type')
  284.  
  285. if itype in (None, 'int'):
  286. intin = identity
  287.  
  288. elif itype == 'decimal':
  289. import decimal
  290. intin = decimal.Decimal
  291.  
  292. elif itype == 'str':
  293. intin = str
  294. # Work around sqlite3's integer overflow.
  295. transform = store._approximate_txout(transform)
  296.  
  297. else:
  298. raise Exception("Unsupported int-type %s" % itype)
  299.  
  300. stype = store.config.get('sequence_type')
  301. if stype in (None, 'update'):
  302. new_id = lambda key: store._new_id_update(key)
  303.  
  304. else:
  305. raise Exception("Unsupported sequence-type %s" % stype)
  306.  
  307. # Convert Oracle LOB to str.
  308. if hasattr(store.module, "LOB") and isinstance(store.module.LOB, type):
  309. def fix_lob(fn):
  310. def ret(x):
  311. return None if x is None else fn(str(x))
  312. return ret
  313. binout = fix_lob(binout)
  314. binout_hex = fix_lob(binout_hex)
  315.  
  316. store.sql_transform = transform
  317. store._sql_cache = {}
  318.  
  319. store.binin = binin
  320. store.binin_hex = binin_hex
  321. store.binout = binout
  322. store.binout_hex = binout_hex
  323. store.hashin = hashin
  324. store.hashin_hex = hashin_hex
  325. store.hashout = hashout
  326. store.hashout_hex = hashout_hex
  327.  
  328. # Might reimplement these someday...
  329. store.binout_int = lambda x: int(binout_hex(x), 16)
  330. def binin_int(x, bits):
  331. if x is None:
  332. return None
  333. return binin_hex(("%%0%dx" % (bits / 4)) % x)
  334. store.binin_int = binin_int
  335.  
  336. store.intin = intin
  337. store.new_id = new_id
  338.  
  339. def sql(store, stmt, params=()):
  340. cached = store._sql_cache.get(stmt)
  341. if cached is None:
  342. cached = store.sql_transform(stmt)
  343. store._sql_cache[stmt] = cached
  344. if store.log_sql:
  345. print "SQL:", cached, params
  346. store.cursor.execute(cached, params)
  347.  
    def ddl(store, stmt):
        """Execute a DDL statement, applying the per-database rewrites
        and committing when the database lacks implicit DDL commit."""
        if stmt.lstrip().startswith("CREATE TABLE "):
            # E.g., " ENGINE=InnoDB" for MySQL.
            stmt += store.config['create_table_epilogue']
        stmt = store._sql_fallback_to_lob(store.sql_transform(stmt))
        if store.log_sql:
            print "SQL DDL:", stmt
        store.cursor.execute(stmt)
        if store.config['ddl_implicit_commit'] == 'false':
            store.commit()
  357.  
  358. # Convert standard placeholders to Python "format" style.
  359. def _qmark_to_format(store, fn):
  360. def ret(stmt):
  361. # XXX Simplified by assuming no literals contain "?".
  362. return fn(stmt.replace('%', '%%').replace("?", "%s"))
  363. return ret
  364.  
  365. # Convert standard placeholders to Python "named" style.
  366. def _named_to_format(store, fn):
  367. def ret(stmt):
  368. i = [0]
  369. def newname(m):
  370. i[0] += 1
  371. return ":p%d" % (i[0],)
  372. # XXX Simplified by assuming no literals contain "?".
  373. return fn(re.sub("\\?", newname, stmt))
  374. return ret
  375.  
  376. # Convert the standard BIT type to a hex string for databases
  377. # and drivers that don't support BIT.
  378. def _sql_binary_as_hex(store, fn):
  379. patt = re.compile("BIT((?: VARYING)?)\\(([0-9]+)\\)")
  380. def fixup(match):
  381. # XXX This assumes no string literals match.
  382. return (("VARCHAR(" if match.group(1) else "CHAR(") +
  383. str(int(match.group(2)) / 4) + ")")
  384. def ret(stmt):
  385. # XXX This assumes no string literals match.
  386. return fn(patt.sub(fixup, stmt).replace("X'", "'"))
  387. return ret
  388.  
  389. # Converts VARCHAR types that are too long to CLOB or similar.
  390. def _sql_fallback_to_lob(store, stmt):
  391. try:
  392. max_varchar = int(store.config['max_varchar'])
  393. clob_type = store.config['clob_type']
  394. except:
  395. return stmt
  396.  
  397. patt = re.compile("VARCHAR\\(([0-9]+)\\)")
  398.  
  399. def fixup(match):
  400. # XXX This assumes no string literals match.
  401. width = int(match.group(1))
  402. if width > max_varchar and clob_type != NO_CLOB:
  403. return clob_type
  404. return "VARCHAR(%d)" % (width,)
  405.  
  406. return patt.sub(fixup, stmt)
  407.  
  408. def _approximate_txout(store, fn):
  409. def ret(stmt):
  410. return fn(re.sub(
  411. r'\btxout_value txout_approx_value\b',
  412. 'CAST(txout_value AS DOUBLE PRECISION) txout_approx_value',
  413. stmt))
  414. return ret
  415.  
  416. def selectrow(store, stmt, params=()):
  417. store.sql(stmt, params)
  418. ret = store.cursor.fetchone()
  419. if store.log_sql:
  420. print "SQL FETCH:", ret
  421. return ret
  422.  
  423. def selectall(store, stmt, params=()):
  424. store.sql(stmt, params)
  425. ret = store.cursor.fetchall()
  426. if store.log_sql:
  427. print "SQL FETCHALL:", ret
  428. return ret
  429.  
    def _init_datadirs(store):
        """Build store.datadirs, the list of block-file directories to
        scan, merging DATADIR table rows with configured args.datadir
        entries and registering any newly named chains."""
        datadirs = {}
        for row in store.selectall("""
            SELECT datadir_id, dirname, blkfile_number, blkfile_offset, chain_id
            FROM datadir"""):
            id, dir, num, offs, chain_id = row
            datadirs[dir] = {
                "id": id,
                "dirname": dir,
                "blkfile_number": int(num),
                "blkfile_offset": int(offs),
                "chain_id": None if chain_id is None else int(chain_id)}

        # By default, scan every dir we know. This doesn't happen in
        # practise, because abe.py sets ~/.bitcoin as default datadir.
        if store.args.datadir is None:
            store.datadirs = datadirs.values()
            return

        store.datadirs = []
        for dircfg in store.args.datadir:
            if isinstance(dircfg, dict):
                dirname = dircfg.get('dirname')
                if dirname is None:
                    raise ValueError(
                        'Missing dirname in datadir configuration: '
                        + str(dircfg))
                if dirname in datadirs:
                    # Already known in the database; reuse its state.
                    store.datadirs.append(datadirs[dirname])
                    continue

                chain_id = dircfg.get('chain_id')
                if chain_id is None:
                    chain_name = dircfg.get('chain')
                    row = store.selectrow(
                        "SELECT chain_id FROM chain WHERE chain_name = ?",
                        (chain_name,))
                    if row is not None:
                        chain_id = row[0]
                    elif chain_name is not None:
                        # Unknown chain name: allocate an id and insert
                        # a new CHAIN row for it.
                        chain_id = store.new_id('chain')
                        code3 = dircfg.get('code3')
                        if code3 is None:
                            # Derive a 3-char code from the id when not given.
                            code3 = '000' if chain_id > 999 else "%03d" % (
                                chain_id,)
                        addr_vers = dircfg.get('address_version')
                        if addr_vers is None:
                            addr_vers = BITCOIN_ADDRESS_VERSION
                        elif isinstance(addr_vers, unicode):
                            addr_vers = addr_vers.encode('latin_1')
                        store.sql("""
                            INSERT INTO chain (
                                chain_id, chain_name, chain_code3,
                                chain_address_version
                            ) VALUES (?, ?, ?, ?)""",
                                  (chain_id, chain_name, code3,
                                   store.binin(addr_vers)))
                        store.commit()
                        print "Assigned chain_id", chain_id, "to", chain_name

            elif dircfg in datadirs:
                store.datadirs.append(datadirs[dircfg])
                continue
            else:
                # Not a dict. A string naming a directory holding
                # standard chains.
                dirname = dircfg
                chain_id = None

            # New directory: start scanning from the first block file.
            store.datadirs.append({
                "id": store.new_id("datadir"),
                "dirname": dirname,
                "blkfile_number": 1,
                "blkfile_offset": 0,
                "chain_id": chain_id,
                })
  506.  
    def _new_id_update(store, key):
        """Allocate and return a fresh id for the named table using the
        abe_sequences bookkeeping table, creating that table (and
        seeding the row from MAX(<key>_id)) on first use."""
        try:
            row = store.selectrow(
                "SELECT nextid FROM abe_sequences WHERE sequence_key = ?",
                (key,))
        except store.module.DatabaseError:
            # XXX Should not rollback in new_id unless configuring.
            store.rollback()
            store.ddl(store._ddl['abe_sequences'])
            row = None
        if row is None:
            # NOTE(review): key is interpolated directly into SQL;
            # callers pass only internal, trusted table names.
            (ret,) = store.selectrow("SELECT MAX(" + key + "_id) FROM " + key)
            ret = 1 if ret is None else ret + 1
            if ret < 100000 and key == "chain":
                # Avoid clash with future built-in chains.
                ret = 100000
            store.sql("INSERT INTO abe_sequences (sequence_key, nextid)"
                      " VALUES (?, ?)", (key, ret))
        else:
            ret = int(row[0])
            store.sql("UPDATE abe_sequences SET nextid = nextid + 1"
                      " WHERE sequence_key = ?",
                      (key,))
        return ret
  531.  
    def commit(store):
        """Commit the current database transaction."""
        if store.log_sql:
            print "SQL COMMIT"
        store.conn.commit()

    def rollback(store):
        """Roll back the current database transaction."""
        if store.log_sql:
            print "SQL ROLLBACK"
        store.conn.rollback()

    def close(store):
        """Close the database connection."""
        if store.log_sql:
            print "SQL CLOSE"
        store.conn.close()
  546.  
    def _get_ddl(store):
        """Return the named DDL statements (reporting views plus the
        bookkeeping tables) shared by initialize and schema upgrades."""
        return {
            "chain_summary":
            # XXX I could do a lot with MATERIALIZED views.
            """CREATE VIEW chain_summary AS SELECT
    cc.chain_id,
    cc.in_longest,
    b.block_id,
    b.block_hash,
    b.block_version,
    b.block_hashMerkleRoot,
    b.block_nTime,
    b.block_nBits,
    b.block_nNonce,
    cc.block_height,
    b.prev_block_id,
    prev.block_hash prev_block_hash,
    b.block_chain_work,
    b.block_num_tx,
    b.block_value_in,
    b.block_value_out,
    b.block_total_satoshis,
    b.block_total_seconds,
    b.block_satoshi_seconds,
    b.block_total_ss,
    b.block_ss_destroyed
FROM chain_candidate cc
JOIN block b ON (cc.block_id = b.block_id)
LEFT JOIN block prev ON (b.prev_block_id = prev.block_id)""",

            "txout_detail":
            """CREATE VIEW txout_detail AS SELECT
    cc.chain_id,
    cc.in_longest,
    cc.block_id,
    b.block_hash,
    b.block_height,
    block_tx.tx_pos,
    tx.tx_id,
    tx.tx_hash,
    tx.tx_lockTime,
    tx.tx_version,
    tx.tx_size,
    txout.txout_id,
    txout.txout_pos,
    txout.txout_value,
    txout.txout_scriptPubKey,
    pubkey.pubkey_id,
    pubkey.pubkey_hash,
    pubkey.pubkey
FROM chain_candidate cc
JOIN block b ON (cc.block_id = b.block_id)
JOIN block_tx ON (b.block_id = block_tx.block_id)
JOIN tx ON (tx.tx_id = block_tx.tx_id)
JOIN txout ON (tx.tx_id = txout.tx_id)
LEFT JOIN pubkey ON (txout.pubkey_id = pubkey.pubkey_id)""",

            "txin_detail":
            """CREATE VIEW txin_detail AS SELECT
    cc.chain_id,
    cc.in_longest,
    cc.block_id,
    b.block_hash,
    b.block_height,
    block_tx.tx_pos,
    tx.tx_id,
    tx.tx_hash,
    tx.tx_lockTime,
    tx.tx_version,
    tx.tx_size,
    txin.txin_id,
    txin.txin_pos,
    txin.txout_id prevout_id,
    txin.txin_scriptSig,
    txin.txin_sequence,
    prevout.txout_value txin_value,
    pubkey.pubkey_id,
    pubkey.pubkey_hash,
    pubkey.pubkey
FROM chain_candidate cc
JOIN block b ON (cc.block_id = b.block_id)
JOIN block_tx ON (b.block_id = block_tx.block_id)
JOIN tx ON (tx.tx_id = block_tx.tx_id)
JOIN txin ON (tx.tx_id = txin.tx_id)
LEFT JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
LEFT JOIN pubkey
    ON (prevout.pubkey_id = pubkey.pubkey_id)""",

            "txout_approx":
            # View of txout for drivers like sqlite3 that can not handle large
            # integer arithmetic. For them, we transform the definition of
            # txout_approx_value to DOUBLE PRECISION (approximate) by a CAST.
            """CREATE VIEW txout_approx AS SELECT
    txout_id,
    tx_id,
    txout_value txout_approx_value
FROM txout""",

            "configvar":
            # ABE accounting. This table is read without knowledge of the
            # database's SQL quirks, so it must use only the most widely supported
            # features.
            """CREATE TABLE configvar (
    configvar_name VARCHAR(100) NOT NULL PRIMARY KEY,
    configvar_value VARCHAR(255)
)""",

            "abe_sequences":
            """CREATE TABLE abe_sequences (
    sequence_key VARCHAR(100) NOT NULL PRIMARY KEY,
    nextid NUMERIC(30)
)""",
            }
  660.  
    def initialize(store):
        """
        Create the database schema.

        Probes working configuration settings, creates every table,
        index and view, seeds the well-known MAGIC/POLICY/CHAIN rows,
        saves the configuration and commits.
        """
        store.configure()

        for stmt in (

            store._ddl['configvar'],

            """CREATE TABLE datadir (
    datadir_id NUMERIC(10) NOT NULL PRIMARY KEY,
    dirname VARCHAR(2000) NOT NULL,
    blkfile_number NUMERIC(4) NULL,
    blkfile_offset NUMERIC(20) NULL,
    chain_id NUMERIC(10) NULL
)""",

            # MAGIC lists the magic numbers seen in messages and block files, known
            # in the original Bitcoin source as `pchMessageStart'.
            """CREATE TABLE magic (
    magic_id NUMERIC(10) NOT NULL PRIMARY KEY,
    magic BIT(32) UNIQUE NOT NULL,
    magic_name VARCHAR(100) UNIQUE NOT NULL
)""",

            # POLICY identifies a block acceptance policy.
            """CREATE TABLE policy (
    policy_id NUMERIC(10) NOT NULL PRIMARY KEY,
    policy_name VARCHAR(100) UNIQUE NOT NULL
)""",

            # A block of the type used by Bitcoin.
            """CREATE TABLE block (
    block_id NUMERIC(14) NOT NULL PRIMARY KEY,
    block_hash BIT(256) UNIQUE NOT NULL,
    block_version NUMERIC(10),
    block_hashMerkleRoot BIT(256),
    block_nTime NUMERIC(20),
    block_nBits NUMERIC(10),
    block_nNonce NUMERIC(10),
    block_height NUMERIC(14),
    prev_block_id NUMERIC(14) NULL,
    block_chain_work BIT(""" + str(WORK_BITS) + """),
    block_value_in NUMERIC(30),
    block_value_out NUMERIC(30),
    block_total_satoshis NUMERIC(26),
    block_total_seconds NUMERIC(20),
    block_satoshi_seconds NUMERIC(28),
    block_total_ss NUMERIC(28),
    block_num_tx NUMERIC(10) NOT NULL,
    block_ss_destroyed NUMERIC(28),
    FOREIGN KEY (prev_block_id)
        REFERENCES block (block_id)
)""",

            # CHAIN comprises a magic number, a policy, and (indirectly via
            # CHAIN_LAST_BLOCK_ID and the referenced block's ancestors) a genesis
            # block, possibly null. A chain may have a currency code.
            """CREATE TABLE chain (
    chain_id NUMERIC(10) NOT NULL PRIMARY KEY,
    magic_id NUMERIC(10) NULL,
    policy_id NUMERIC(10) NULL,
    chain_name VARCHAR(100) UNIQUE NOT NULL,
    chain_code3 CHAR(3) NULL,
    chain_address_version BIT VARYING(800) NOT NULL,
    chain_last_block_id NUMERIC(14) NULL,
    FOREIGN KEY (magic_id) REFERENCES magic (magic_id),
    FOREIGN KEY (policy_id) REFERENCES policy (policy_id),
    FOREIGN KEY (chain_last_block_id)
        REFERENCES block (block_id)
)""",

            # CHAIN_CANDIDATE lists blocks that are, or might become, part of the
            # given chain. IN_LONGEST is 1 when the block is in the chain, else 0.
            # IN_LONGEST denormalizes information stored canonically in
            # CHAIN.CHAIN_LAST_BLOCK_ID and BLOCK.PREV_BLOCK_ID.
            """CREATE TABLE chain_candidate (
    chain_id NUMERIC(10) NOT NULL,
    block_id NUMERIC(14) NOT NULL,
    in_longest NUMERIC(1),
    block_height NUMERIC(14),
    PRIMARY KEY (chain_id, block_id),
    FOREIGN KEY (block_id) REFERENCES block (block_id)
)""",
            """CREATE INDEX x_cc_block ON chain_candidate (block_id)""",
            """CREATE INDEX x_cc_chain_block_height
    ON chain_candidate (chain_id, block_height)""",
            """CREATE INDEX x_cc_block_height ON chain_candidate (block_height)""",

            # An orphan block must remember its hashPrev.
            """CREATE TABLE orphan_block (
    block_id NUMERIC(14) NOT NULL PRIMARY KEY,
    block_hashPrev BIT(256) NOT NULL,
    FOREIGN KEY (block_id) REFERENCES block (block_id)
)""",
            """CREATE INDEX x_orphan_block_hashPrev ON orphan_block (block_hashPrev)""",

            # Denormalize the relationship inverse to BLOCK.PREV_BLOCK_ID.
            """CREATE TABLE block_next (
    block_id NUMERIC(14) NOT NULL,
    next_block_id NUMERIC(14) NOT NULL,
    PRIMARY KEY (block_id, next_block_id),
    FOREIGN KEY (block_id) REFERENCES block (block_id),
    FOREIGN KEY (next_block_id) REFERENCES block (block_id)
)""",

            # A transaction of the type used by Bitcoin.
            """CREATE TABLE tx (
    tx_id NUMERIC(26) NOT NULL PRIMARY KEY,
    tx_hash BIT(256) UNIQUE NOT NULL,
    tx_version NUMERIC(10),
    tx_lockTime NUMERIC(10),
    tx_size NUMERIC(10)
)""",

            # Presence of transactions in blocks is many-to-many.
            """CREATE TABLE block_tx (
    block_id NUMERIC(14) NOT NULL,
    tx_id NUMERIC(26) NOT NULL,
    tx_pos NUMERIC(10) NOT NULL,
    satoshi_seconds_destroyed NUMERIC(28),
    PRIMARY KEY (block_id, tx_id),
    UNIQUE (block_id, tx_pos),
    FOREIGN KEY (block_id)
        REFERENCES block (block_id),
    FOREIGN KEY (tx_id)
        REFERENCES tx (tx_id)
)""",
            """CREATE INDEX x_block_tx_tx ON block_tx (tx_id)""",

            # A public key for sending bitcoins. PUBKEY_HASH is derivable from a
            # Bitcoin or Testnet address.
            """CREATE TABLE pubkey (
    pubkey_id NUMERIC(26) NOT NULL PRIMARY KEY,
    pubkey_hash BIT(160) UNIQUE NOT NULL,
    pubkey BIT(520) NULL
)""",

            # A transaction out-point.
            """CREATE TABLE txout (
    txout_id NUMERIC(26) NOT NULL PRIMARY KEY,
    tx_id NUMERIC(26) NOT NULL,
    txout_pos NUMERIC(10) NOT NULL,
    txout_value NUMERIC(30) NOT NULL,
    txout_scriptPubKey BIT VARYING(80000),
    pubkey_id NUMERIC(26),
    UNIQUE (tx_id, txout_pos),
    FOREIGN KEY (pubkey_id)
        REFERENCES pubkey (pubkey_id)
)""",
            """CREATE INDEX x_txout_pubkey ON txout (pubkey_id)""",

            # A transaction in-point.
            """CREATE TABLE txin (
    txin_id NUMERIC(26) NOT NULL PRIMARY KEY,
    tx_id NUMERIC(26) NOT NULL,
    txin_pos NUMERIC(10) NOT NULL,
    txout_id NUMERIC(26),
    txin_scriptSig BIT VARYING(80000),
    txin_sequence NUMERIC(10),
    UNIQUE (tx_id, txin_pos),
    FOREIGN KEY (tx_id)
        REFERENCES tx (tx_id)
)""",
            """CREATE INDEX x_txin_txout ON txin (txout_id)""",

            # While TXIN.TXOUT_ID can not be found, we must remember TXOUT_POS,
            # a.k.a. PREVOUT_N.
            """CREATE TABLE unlinked_txin (
    txin_id NUMERIC(26) NOT NULL PRIMARY KEY,
    txout_tx_hash BIT(256) NOT NULL,
    txout_pos NUMERIC(10) NOT NULL,
    FOREIGN KEY (txin_id) REFERENCES txin (txin_id)
)""",
            """CREATE INDEX x_unlinked_txin_outpoint
    ON unlinked_txin (txout_tx_hash, txout_pos)""",

            """CREATE TABLE block_txin (
    block_id NUMERIC(14) NOT NULL,
    txin_id NUMERIC(26) NOT NULL,
    out_block_id NUMERIC(14) NOT NULL,
    PRIMARY KEY (block_id, txin_id),
    FOREIGN KEY (block_id) REFERENCES block (block_id),
    FOREIGN KEY (txin_id) REFERENCES txin (txin_id),
    FOREIGN KEY (out_block_id) REFERENCES block (block_id)
)""",

            store._ddl['chain_summary'],
            store._ddl['txout_detail'],
            store._ddl['txin_detail'],
            store._ddl['txout_approx'],
            ):
            try:
                store.ddl(stmt)
            except:
                # Identify the failing statement before re-raising.
                print "Failed:", stmt
                raise

        ins_magic = """INSERT INTO magic (magic_id, magic, magic_name)
            VALUES (?, ?, ?)"""
        ins_policy = """INSERT INTO policy (policy_id, policy_name)
            VALUES (?, ?)"""
        ins_chain = """
            INSERT INTO chain (
                chain_id, magic_id, policy_id, chain_name, chain_code3,
                chain_address_version
            ) VALUES (?, ?, ?, ?, ?, ?)"""

        # Some public data.
        store.sql(ins_magic, (BITCOIN_MAGIC_ID, store.binin(BITCOIN_MAGIC),
                              "Bitcoin"))
        store.sql(ins_magic, (TESTNET_MAGIC_ID, store.binin(TESTNET_MAGIC),
                              "Testnet"))
        store.sql(ins_magic, (NAMECOIN_MAGIC_ID,
                              store.binin(NAMECOIN_MAGIC), "Namecoin"))
        store.sql(ins_magic, (WEEDS_MAGIC_ID,
                              store.binin(WEEDS_MAGIC), "Weedsnet"))
        store.sql(ins_magic, (BEER_MAGIC_ID,
                              store.binin(BEER_MAGIC), "BeerTokens"))
        store.sql(ins_magic, (SLC_MAGIC_ID,
                              store.binin(SLC_MAGIC), "SolidCoins"))
        store.sql(ins_magic, (LC_MAGIC_ID,
                              store.binin(LC_MAGIC), "LiteCoin"))
        store.sql(ins_policy, (BITCOIN_POLICY_ID, "Bitcoin policy"))
        store.sql(ins_policy, (TESTNET_POLICY_ID, "Testnet policy"))
        store.sql(ins_policy, (NAMECOIN_POLICY_ID, "Namecoin policy"))
        store.sql(ins_policy, (WEEDS_POLICY_ID, "Weedsnet policy"))
        store.sql(ins_policy, (BEER_POLICY_ID, "BeerTokens policy"))
        store.sql(ins_policy, (SLC_POLICY_ID, "SolidCoins policy"))
        store.sql(ins_policy, (LC_POLICY_ID, "LiteCoin policy"))
        store.sql(ins_chain,
                  (BITCOIN_CHAIN_ID, BITCOIN_MAGIC_ID, BITCOIN_POLICY_ID,
                   'Bitcoin', 'BTC', store.binin(BITCOIN_ADDRESS_VERSION)))
        store.sql(ins_chain,
                  (TESTNET_CHAIN_ID, TESTNET_MAGIC_ID, TESTNET_POLICY_ID,
                   'Testnet', 'BC0', store.binin(TESTNET_ADDRESS_VERSION)))
        store.sql(ins_chain,
                  (NAMECOIN_CHAIN_ID, NAMECOIN_MAGIC_ID, NAMECOIN_POLICY_ID,
                   'Namecoin', 'NMC', store.binin(NAMECOIN_ADDRESS_VERSION)))
        store.sql(ins_chain,
                  (WEEDS_CHAIN_ID, WEEDS_MAGIC_ID, WEEDS_POLICY_ID,
                   'Weedsnet', 'WDS', store.binin(WEEDS_ADDRESS_VERSION)))
        store.sql(ins_chain,
                  (BEER_CHAIN_ID, BEER_MAGIC_ID, BEER_POLICY_ID,
                   'BeerTokens', 'BER', store.binin(BEER_ADDRESS_VERSION)))
        store.sql(ins_chain,
                  (SLC_CHAIN_ID, SLC_MAGIC_ID, SLC_POLICY_ID,
                   'SolidCoins', 'SLC', store.binin(SLC_ADDRESS_VERSION)))
        store.sql(ins_chain,
                  (LC_CHAIN_ID, LC_MAGIC_ID, LC_POLICY_ID,
                   'LiteCoin', 'LC', store.binin(LC_ADDRESS_VERSION)))

        store.save_config()
        store.commit()
  916.  
    def configure(store):
        """Probe the database for working settings (DDL commit style,
        table epilogue, VARCHAR/CLOB limits, then binary, integer and
        sequence representations) and record them in store.config."""
        store.config = {}

        store.configure_ddl_implicit_commit()
        store.configure_create_table_epilogue()
        store.configure_max_varchar()
        store.configure_clob_type()

        # Try each candidate in order; '' is a sentinel meaning every
        # candidate before it failed.
        for val in (
            ['str', 'bytearray', 'buffer', 'hex', '']
            if store.args.binary_type is None else
            [ store.args.binary_type, '' ]):

            if val == '':
                raise Exception(
                    "No known binary data representation works"
                    if store.args.binary_type is None else
                    "Binary type " + store.args.binary_type + " fails test")
            store.config['binary_type'] = val
            store._set_sql_flavour()
            if store._test_binary_type():
                print "binary_type=%s" % (val,)
                break

        for val in ['int', 'decimal', 'str', '']:
            if val == '':
                raise Exception(
                    "No known large integer representation works")
            store.config['int_type'] = val
            store._set_sql_flavour()
            if store._test_int_type():
                print "int_type=%s" % (val,)
                break

        for val in ['update', '']:
            if val == '':
                raise Exception(
                    "No known sequence type works")
            store.config['sequence_type'] = val
            store._set_sql_flavour()
            if store._test_sequence_type():
                print "sequence_type=%s" % (val,)
                break
  960.  
def _drop_if_exists(store, otype, name):
    """Best-effort DROP of a database object; ignore failure if it is absent."""
    try:
        store.sql("DROP " + otype + " " + name)
        store.commit()
    except store.module.DatabaseError:
        # Object did not exist (or DROP unsupported); undo and carry on.
        store.rollback()
  967.  
def _drop_table_if_exists(store, obj):
    """Drop a table, ignoring errors if it does not exist."""
    store._drop_if_exists("TABLE", obj)
def _drop_view_if_exists(store, obj):
    """Drop a view, ignoring errors if it does not exist."""
    store._drop_if_exists("VIEW", obj)
def _drop_sequence_if_exists(store, obj):
    """Drop a sequence, ignoring errors if it does not exist."""
    store._drop_if_exists("SEQUENCE", obj)
  974.  
  975. def configure_ddl_implicit_commit(store):
  976. if 'create_table_epilogue' not in store.config:
  977. store.config['create_table_epilogue'] = ''
  978. for val in ['true', 'false']:
  979. store.config['ddl_implicit_commit'] = val
  980. store._set_sql_flavour()
  981. if store._test_ddl():
  982. print "ddl_implicit_commit=%s" % (val,)
  983. return
  984. raise Exception("Can not test for DDL implicit commit.")
  985.  
def _test_ddl(store):
    """Test whether DDL performs implicit commit."""
    store._drop_table_if_exists("abe_test_1")
    try:
        store.ddl(
            "CREATE TABLE abe_test_1 ("
            " abe_test_1_id NUMERIC(12) NOT NULL PRIMARY KEY,"
            " foo VARCHAR(10))")
        store.rollback()
        # Succeeds only if the table survived the rollback.
        store.selectall("SELECT MAX(abe_test_1_id) FROM abe_test_1")
        return True
    except store.module.DatabaseError, e:
        # Table vanished on rollback: DDL was transactional.
        store.rollback()
        return False
    except Exception, e:
        print "_test_ddl:", store.config['ddl_implicit_commit'] + ":", e
        store.rollback()
        return False
    finally:
        store._drop_table_if_exists("abe_test_1")
  1006.  
  1007. def configure_create_table_epilogue(store):
  1008. for val in ['', ' ENGINE=InnoDB']:
  1009. store.config['create_table_epilogue'] = val
  1010. store._set_sql_flavour()
  1011. if store._test_transaction():
  1012. print "create_table_epilogue='%s'" % (val,)
  1013. return
  1014. raise Exception("Can not create a transactional table.")
  1015.  
def _test_transaction(store):
    """Test whether CREATE TABLE needs ENGINE=InnoDB for rollback."""
    store._drop_table_if_exists("abe_test_1")
    try:
        store.ddl(
            "CREATE TABLE abe_test_1 (a NUMERIC(12))")
        store.sql("INSERT INTO abe_test_1 (a) VALUES (4)")
        store.commit()
        store.sql("INSERT INTO abe_test_1 (a) VALUES (5)")
        store.rollback()
        # Only the committed row must remain if rollback works.
        data = [int(row[0]) for row in store.selectall(
            "SELECT a FROM abe_test_1")]
        return data == [4]
    except store.module.DatabaseError, e:
        store.rollback()
        return False
    except Exception, e:
        print "_test_transaction:", \
            store.config['create_table_epilogue'] + ":", e
        store.rollback()
        return False
    finally:
        store._drop_table_if_exists("abe_test_1")
  1039.  
def configure_max_varchar(store):
    """Find the maximum VARCHAR width, up to 0xffffffff"""
    # Binary search: lo always names a width known to work, hi a width
    # known (or initially assumed) to fail.
    lo = 0
    hi = 1 << 32
    store.config['max_varchar'] = hi
    store._drop_table_if_exists("abe_test_1")
    while True:
        mid = (lo + hi) / 2
        store._drop_table_if_exists("abe_test_1")
        try:
            store.ddl("""CREATE TABLE abe_test_1
                (a VARCHAR(%d), b VARCHAR(%d))""" % (mid, mid))
            store.sql("INSERT INTO abe_test_1 (a, b) VALUES ('x', 'y')")
            row = store.selectrow("SELECT a, b FROM abe_test_1")
            if [x for x in row] == ['x', 'y']:
                lo = mid
            else:
                hi = mid
        except store.module.DatabaseError, e:
            store.rollback()
            hi = mid
        except Exception, e:
            print "configure_max_varchar: %d:" % (mid,), e
            store.rollback()
            hi = mid
        if lo + 1 == hi:
            # Converged: lo is the largest width that worked.
            store.config['max_varchar'] = str(lo)
            print "max_varchar=" + store.config['max_varchar']
            break
    store._drop_table_if_exists("abe_test_1")
  1070.  
def configure_clob_type(store):
    """Find the name of the CLOB type, if any."""
    # Round-trip a 10 KB string through each candidate column type.
    long_str = 'x' * 10000
    store._drop_table_if_exists("abe_test_1")
    for val in ['CLOB', 'LONGTEXT', 'TEXT', 'LONG']:
        try:
            store.ddl("CREATE TABLE abe_test_1 (a %s)" % (val,))
            store.sql("INSERT INTO abe_test_1 (a) VALUES (?)",
                      (store.binin(long_str),))
            out = store.selectrow("SELECT a FROM abe_test_1")[0]
            if store.binout(out) == long_str:
                store.config['clob_type'] = val
                print "clob_type=" + val
                return
            else:
                print "out=" + repr(out)
        except store.module.DatabaseError, e:
            print "configure_clob_type: %s:" % (val,), e
            store.rollback()
        except Exception, e:
            print "configure_clob_type: %s:" % (val,), e
            try:
                store.rollback()
            except:
                # Fetching a CLOB really messes up Easysoft ODBC Oracle.
                store.reconnect()
        finally:
            store._drop_table_if_exists("abe_test_1")
    # No candidate round-tripped; fall back to the no-CLOB marker.
    warnings.warn("No native type found for CLOB.")
    store.config['clob_type'] = NO_CLOB
  1101.  
def _test_binary_type(store):
    """Round-trip 32 binary bytes through hashin/hashout and binin/binout."""
    store._drop_table_if_exists("abe_test_1")
    try:
        store.ddl(
            "CREATE TABLE abe_test_1 (test_id NUMERIC(2) NOT NULL PRIMARY KEY,"
            " test_bit BIT(256), test_varbit BIT VARYING(80000))")
        # Bytes 0x00..0x1f exercise non-printable values.
        val = str(''.join(map(chr, range(32))))
        store.sql("INSERT INTO abe_test_1 (test_id, test_bit, test_varbit)"
                  " VALUES (?, ?, ?)",
                  (1, store.hashin(val), store.binin(val)))
        (bit, vbit) = store.selectrow(
            "SELECT test_bit, test_varbit FROM abe_test_1")
        if store.hashout(bit) != val:
            return False
        if store.binout(vbit) != val:
            return False
        return True
    except store.module.DatabaseError, e:
        store.rollback()
        return False
    except Exception, e:
        print "_test_binary_type:", store.config['binary_type'] + ":", e
        store.rollback()
        return False
    finally:
        store._drop_table_if_exists("abe_test_1")
  1128.  
def _test_int_type(store):
    """Check that a 20-digit integer survives storage, view access
    and multiplication under the configured int_type."""
    store._drop_view_if_exists("abe_test_v1")
    store._drop_table_if_exists("abe_test_1")
    try:
        store.ddl(
            """CREATE TABLE abe_test_1 (test_id NUMERIC(2) NOT NULL PRIMARY KEY,
                txout_value NUMERIC(30), i2 NUMERIC(20))""")
        store.ddl(
            """CREATE VIEW abe_test_v1 AS SELECT test_id,
                txout_value txout_approx_value, txout_value i1, i2
                FROM abe_test_1""")
        v1 = 12345678901234567890
        v2 = 1234567890
        store.sql("INSERT INTO abe_test_1 (test_id, txout_value, i2)"
                  " VALUES (?, ?, ?)",
                  (1, store.intin(v1), v2))
        store.commit()
        prod, o1 = store.selectrow(
            "SELECT txout_approx_value * i2, i1 FROM abe_test_v1")
        prod = int(prod)
        o1 = int(o1)
        # Allow 0.01% slop in the product: some backends compute it in
        # floating point; the stored value itself must be exact.
        if prod < v1 * v2 * 1.0001 and prod > v1 * v2 * 0.9999 and o1 == v1:
            return True
        return False
    except store.module.DatabaseError, e:
        store.rollback()
        return False
    except Exception, e:
        print "_test_int_type:", store.config['int_type'] + ":", e
        store.rollback()
        return False
    finally:
        store._drop_view_if_exists("abe_test_v1")
        store._drop_table_if_exists("abe_test_1")
  1163.  
  1164. def _test_sequence_type(store):
  1165. store._drop_table_if_exists("abe_test_1")
  1166. try:
  1167. store.ddl(
  1168. """CREATE TABLE abe_test_1 (
  1169. abe_test_1_id NUMERIC(12) NOT NULL PRIMARY KEY,
  1170. foo VARCHAR(10))""")
  1171. id1 = store.new_id('abe_test_1')
  1172. id2 = store.new_id('abe_test_1')
  1173. if int(id1) != int(id2):
  1174. return True
  1175. return False
  1176. except store.module.DatabaseError, e:
  1177. print "_test_sequence_type:", store.config['sequence_type'] + ":", e
  1178. store.rollback()
  1179. return False
  1180. except Exception, e:
  1181. print "_test_sequence_type:", store.config['sequence_type'] + ":", e
  1182. store.rollback()
  1183. return False
  1184. finally:
  1185. store._drop_table_if_exists("abe_test_1")
  1186.  
def save_config(store):
    """Record the schema version and persist every configured variable."""
    store.config['schema_version'] = SCHEMA_VERSION
    for name in store.config.keys():
        store.save_configvar(name)
  1191.  
def save_configvar(store, name):
    """Insert one configvar row holding the current in-memory value."""
    store.sql("INSERT INTO configvar (configvar_name, configvar_value)"
              " VALUES (?, ?)", (name, store.config[name]))
  1195.  
def set_configvar(store, name, value):
    """Set a config variable in memory and persist it to the database."""
    store.config[name] = value
    store.save_configvar(name)
  1199.  
def _get_block(store, block_id):
    """Return the cached in-memory block dict for block_id, or None."""
    return store._blocks.get(int(block_id))
  1202.  
  1203. def _put_block(store, block_id, prev_id, height):
  1204. block = {
  1205. 'prev_id': None if prev_id is None else int(prev_id),
  1206. 'height': None if height is None else int(height),
  1207. 'in_longest_chains': set()}
  1208. store._blocks[int(block_id)] = block
  1209. return block
  1210.  
def _load_block(store, block_id):
    """Return the cached block record, loading it from the database on miss.

    Returns None if the block is unknown.  On load, also records which
    chains' longest branches contain the block.
    """
    block = store._get_block(block_id)
    if block is None:
        row = store.selectrow("""
            SELECT prev_block_id, block_height
              FROM block
             WHERE block_id = ?""", (block_id,))
        if row is None:
            return None
        prev_id, height = row
        block = store._put_block(block_id, prev_id, height)
        # Populate the longest-chain membership set from chain_candidate.
        for row in store.selectall("""
            SELECT chain_id
              FROM chain_candidate
             WHERE block_id = ? AND in_longest = 1""", (block_id,)):
            (chain_id,) = row
            store._add_block_chain(block_id, chain_id)
    return block
  1229.  
  1230. def _update_block(store, block_id, prev_id, height):
  1231. block = store._get_block(block_id)
  1232. if block:
  1233. block['prev_id'] = int(prev_id)
  1234. block['height'] = int(height)
  1235.  
  1236. def _add_block_chain(store, block_id, chain_id):
  1237. block = store._get_block(block_id)
  1238. if block:
  1239. block['in_longest_chains'].add(int(chain_id))
  1240.  
  1241. def _remove_block_chain(store, block_id, chain_id):
  1242. block = store._get_block(block_id)
  1243. if block:
  1244. block['in_longest_chains'].remove(int(chain_id))
  1245.  
def is_descended_from(store, block_id, ancestor_id):
    """Return True if ancestor_id is block_id or one of its ancestors.

    Uses longest-chain membership as a shortcut: if both blocks sit in a
    common longest chain, ancestry reduces to a height comparison.
    Returns None when the prev-link walk runs out before deciding.
    """
    if block_id == ancestor_id:
        return True
    block = store._load_block(block_id)
    if block['prev_id'] == ancestor_id:
        return True
    ancestor = store._load_block(ancestor_id)
    chains = ancestor['in_longest_chains']
    while True:
        if chains.intersection(block['in_longest_chains']):
            # Both in some same longest chain: ancestry is by height.
            return ancestor['height'] <= block['height']
        if block['in_longest_chains'] - chains:
            # block is in a longest chain the ancestor is not in.
            return False
        if block['prev_id'] is None:
            # Ran out of predecessors without an answer.
            return None
        block = store._load_block(block['prev_id'])
        if block['prev_id'] == ancestor_id:
            return True
        if block['height'] <= ancestor['height']:
            # Walked at or below the ancestor's height without meeting it.
            return False
  1271.  
def find_prev(store, hash):
    """Look up a block by hash and return its cumulative statistics.

    Returns the 8-tuple (block_id, height, chain_work, satoshis,
    seconds, satoshi_seconds, total_ss, nTime); all None if unknown.
    """
    row = store.selectrow("""
        SELECT block_id, block_height, block_chain_work,
               block_total_satoshis, block_total_seconds,
               block_satoshi_seconds, block_total_ss, block_nTime
          FROM block
         WHERE block_hash=?""", (store.hashin(hash),))
    if row is None:
        return (None, None, None, None, None, None, None, None)
    (id, height, chain_work, satoshis, seconds, satoshi_seconds,
     total_ss, nTime) = row
    # Normalize database values: heights/totals to int (None preserved),
    # chain_work from its binary encoding.
    return (id, None if height is None else int(height),
            store.binout_int(chain_work),
            None if satoshis is None else int(satoshis),
            None if seconds is None else int(seconds),
            None if satoshi_seconds is None else int(satoshi_seconds),
            None if total_ss is None else int(total_ss),
            int(nTime))
  1290.  
def import_block(store, b):
    """Insert block b and its transactions; return the new block_id.

    Computes cumulative chain statistics (work, coins, coin-seconds)
    from the parent when known, links previously orphaned children, and
    propagates totals to descendants via adopt_orphans.  Mutates b,
    adding 'block_id', 'height', 'chain_work', 'top' and related keys.
    """

    # Get a new block ID.
    block_id = store.new_id("block")
    b['block_id'] = block_id

    # Import new transactions.
    b['value_in'] = 0
    b['value_out'] = 0
    for pos in xrange(len(b['transactions'])):
        tx = b['transactions'][pos]
        if 'hash' not in tx:
            tx['hash'] = util.double_sha256(tx['tx'])
        # Reuse an existing tx row when the hash is already known;
        # position 0 is the coinbase.
        tx['tx_id'] = (store.tx_find_id_and_value(tx) or
                       store.import_tx(tx, pos == 0))
        b['value_in'] += tx['value_in']
        b['value_out'] += tx['value_out']

    # Look for the parent block.
    hashPrev = b['hashPrev']
    is_genesis = hashPrev == GENESIS_HASH_PREV
    (prev_block_id, prev_height, prev_work, prev_satoshis,
     prev_seconds, prev_ss, prev_total_ss, prev_nTime) = (
        (None, -1, 0, 0, 0, 0, 0, b['nTime'])
        if is_genesis else
        store.find_prev(hashPrev))

    b['prev_block_id'] = prev_block_id
    b['height'] = None if prev_height is None else prev_height + 1
    b['chain_work'] = util.calculate_work(prev_work, b['nBits'])

    store._put_block(block_id, prev_block_id, b['height'])

    # Cumulative totals are unknown (None) when the parent is unknown.
    if prev_seconds is None:
        b['seconds'] = None
    else:
        b['seconds'] = prev_seconds + b['nTime'] - prev_nTime
    if prev_satoshis is None:
        b['satoshis'] = None
    else:
        b['satoshis'] = prev_satoshis + b['value_out'] - b['value_in']

    # Insert the block table row.
    store.sql(
        """INSERT INTO block (
            block_id, block_hash, block_version, block_hashMerkleRoot,
            block_nTime, block_nBits, block_nNonce, block_height,
            prev_block_id, block_chain_work, block_value_in,
            block_value_out, block_total_satoshis,
            block_total_seconds, block_num_tx
        ) VALUES (
            ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
        )""",
        (block_id, store.hashin(b['hash']), store.intin(b['version']),
         store.hashin(b['hashMerkleRoot']), store.intin(b['nTime']),
         store.intin(b['nBits']), store.intin(b['nNonce']),
         b['height'], prev_block_id,
         store.binin_int(b['chain_work'], WORK_BITS),
         store.intin(b['value_in']), store.intin(b['value_out']),
         store.intin(b['satoshis']), store.intin(b['seconds']),
         len(b['transactions'])))

    # List the block's transactions in block_tx.
    for tx_pos in xrange(len(b['transactions'])):
        tx = b['transactions'][tx_pos]
        store.sql("""
            INSERT INTO block_tx
                (block_id, tx_id, tx_pos)
            VALUES (?, ?, ?)""",
                  (block_id, tx['tx_id'], tx_pos))
        print "block_tx", block_id, tx['tx_id']

    # Create rows in block_txin: record, for each input in this block,
    # the block containing the output it spends, but only when that
    # block is an ancestor of this one.
    for row in store.selectall("""
        SELECT txin.txin_id, obt.block_id
          FROM block_tx bt
          JOIN txin ON (txin.tx_id = bt.tx_id)
          JOIN txout ON (txin.txout_id = txout.txout_id)
          JOIN block_tx obt ON (txout.tx_id = obt.tx_id)
         WHERE bt.block_id = ?""", (block_id,)):
        (txin_id, oblock_id) = row
        if store.is_descended_from(block_id, oblock_id):
            store.sql("""
                INSERT INTO block_txin (block_id, txin_id, out_block_id)
                VALUES (?, ?, ?)""",
                      (block_id, txin_id, oblock_id))

    b['ss_destroyed'] = store._get_block_ss_destroyed(
        block_id, b['nTime'],
        map(lambda tx: tx['tx_id'], b['transactions']))

    if prev_satoshis is not None:
        # Satoshi-seconds created since the parent block.
        ss_created = prev_satoshis * (b['nTime'] - prev_nTime)
        b['ss'] = prev_ss + ss_created - b['ss_destroyed']
        b['total_ss'] = prev_total_ss + ss_created
        # NOTE(review): when prev_satoshis is None (unknown parent),
        # b['ss'] is never set; adopt_orphans reading b['ss'] would
        # raise KeyError if such a block has recorded successors --
        # confirm against the orphan-adoption path.

        store.sql("""
            UPDATE block
               SET block_satoshi_seconds = ?,
                   block_total_ss = ?,
                   block_ss_destroyed = ?
             WHERE block_id = ?""",
                  (store.intin(b['ss']),
                   store.intin(b['total_ss']),
                   store.intin(b['ss_destroyed']),
                   block_id))

    # Store the inverse hashPrev relationship or mark the block as
    # an orphan.
    if prev_block_id:
        store.sql("""
            INSERT INTO block_next (block_id, next_block_id)
            VALUES (?, ?)""", (prev_block_id, block_id))
    elif not is_genesis:
        store.sql("INSERT INTO orphan_block (block_id, block_hashPrev)" +
                  " VALUES (?, ?)", (block_id, store.hashin(b['hashPrev'])))

    # Adopt any existing orphans whose hashPrev points at this block.
    for row in store.selectall("""
        SELECT block_id FROM orphan_block WHERE block_hashPrev = ?""",
                               (store.hashin(b['hash']),)):
        (orphan_id,) = row
        store.sql("UPDATE block SET prev_block_id = ? WHERE block_id = ?",
                  (block_id, orphan_id))
        store.sql("""
            INSERT INTO block_next (block_id, next_block_id)
            VALUES (?, ?)""", (block_id, orphan_id))
        store.sql("DELETE FROM orphan_block WHERE block_id = ?",
                  (orphan_id,))

    # Propagate totals downward and remember the heaviest descendant tip.
    b['top'], new_work = store.adopt_orphans(b, 0)

    return block_id
  1423.  
def _get_block_ss_destroyed(store, block_id, nTime, tx_ids):
    """Compute and record satoshi-seconds destroyed by txs in block_id.

    For each tx, sums spent-output value times its age (nTime minus the
    spent output's block time), stores the figure on the block_tx row,
    and returns the block-wide total.
    """
    block_ss_destroyed = 0
    for tx_id in tx_ids:
        destroyed = int(store.selectrow("""
            SELECT COALESCE(SUM(txout_approx.txout_approx_value *
                                (? - b.block_nTime)), 0)
              FROM block_txin bti
              JOIN txin ON (bti.txin_id = txin.txin_id)
              JOIN txout_approx ON (txin.txout_id = txout_approx.txout_id)
              JOIN block_tx obt ON (txout_approx.tx_id = obt.tx_id)
              JOIN block b ON (obt.block_id = b.block_id)
             WHERE bti.block_id = ? AND txin.tx_id = ?""",
                                        (nTime, block_id, tx_id))[0])
        block_ss_destroyed += destroyed
        store.sql("""
            UPDATE block_tx
               SET satoshi_seconds_destroyed = ?
             WHERE block_id = ?
               AND tx_id = ?""",
                  (store.intin(destroyed), block_id, tx_id))
    return block_ss_destroyed
  1445.  
# Propagate cumulative values to descendant blocks.  Return info
# about the longest chain rooted at b.
def adopt_orphans(store, b, orphan_work):
    """Recursively push b's cumulative statistics to its descendants.

    orphan_work is the work accumulated along the path walked so far.
    Returns (tip_block_dict, tip_work) for the heaviest descendant
    chain; returns (b, orphan_work) itself when b has no successors.
    """
    block_id = b['block_id']
    next_blocks = store.find_next_blocks(block_id)
    if not next_blocks:
        return b, orphan_work

    height = None if b['height'] is None else b['height'] + 1
    best = None
    best_work = orphan_work

    for row in store.selectall("""
        SELECT bn.next_block_id, b.block_nBits,
               b.block_value_out - b.block_value_in, block_nTime
          FROM block_next bn
          JOIN block b ON (bn.next_block_id = b.block_id)
         WHERE bn.block_id = ?""", (block_id,)):
        next_id, nBits, generated, nTime = row
        nBits = int(nBits)
        generated = None if generated is None else int(generated)
        nTime = int(nTime)
        new_work = util.calculate_work(orphan_work, nBits)

        # Each cumulative figure is None when the corresponding parent
        # figure is unknown.
        if b['chain_work'] is None:
            chain_work = None
        else:
            chain_work = b['chain_work'] + new_work - orphan_work

        if b['seconds'] is None:
            seconds = None
        else:
            seconds = b['seconds'] + nTime - b['nTime']

        if b['satoshis'] is None or generated is None:
            satoshis = None
        else:
            satoshis = b['satoshis'] + generated

        # Satoshi-seconds carry forward only when all parent figures
        # are known.
        if b['ss'] is None or b['satoshis'] is None or b['seconds'] is None:
            destroyed = None
            ss = None
        else:
            tx_ids = map(
                lambda row: row[0],
                store.selectall("""
                    SELECT tx_id
                      FROM block_tx
                     WHERE block_id = ?""", (next_id,)))
            destroyed = store._get_block_ss_destroyed(
                next_id, nTime, tx_ids)
            ss = b['ss'] + b['satoshis'] * (nTime - b['nTime']) - destroyed

        store.sql("""
            UPDATE block
               SET block_height = ?,
                   block_chain_work = ?,
                   block_total_seconds = ?,
                   block_total_satoshis = ?,
                   block_satoshi_seconds = ?,
                   block_ss_destroyed = ?
             WHERE block_id = ?""",
                  (height, store.binin_int(chain_work, WORK_BITS),
                   store.intin(seconds), store.intin(satoshis),
                   store.intin(ss), store.intin(destroyed), next_id))

        store._update_block(next_id, block_id, height)

        if height is not None:
            store.sql("""
                UPDATE chain_candidate SET block_height = ?
                 WHERE block_id = ?""",
                      (height, next_id))

        # Recurse into this child with its updated figures.
        nb = {
            "block_id": next_id,
            "height": height,
            "chain_work": chain_work,
            "nTime": nTime,
            "seconds": seconds,
            "satoshis": satoshis,
            "ss": ss}
        ret, work = store.adopt_orphans(nb, new_work)

        if work > best_work:
            best = ret
            best_work = work

    return best, best_work
  1535.  
def tx_find_id_and_value(store, tx):
    """Return the tx_id of an already-stored transaction by hash, else None.

    On success, also fills tx['value_in'] and tx['value_out'] from the
    stored inputs and outputs (0 when the SUM is NULL, e.g. coinbase).
    """
    row = store.selectrow("""
        SELECT tx.tx_id, SUM(txout.txout_value)
          FROM tx
          LEFT JOIN txout ON (tx.tx_id = txout.tx_id)
         WHERE tx_hash = ?
         GROUP BY tx.tx_id""",
                          (store.hashin(tx['hash']),))
    if row:
        tx_id, value_out = row
        (value_in,) = store.selectrow("""
            SELECT SUM(prevout.txout_value)
              FROM txin
              JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
             WHERE txin.tx_id = ?""", (tx_id,))
        tx['value_in'] = 0 if value_in is None else int(value_in)
        tx['value_out'] = 0 if value_out is None else int(value_out)
        return tx_id

    return None
  1556.  
def import_tx(store, tx, is_coinbase):
    """Insert a transaction with its inputs and outputs; return its tx_id.

    Sets tx['value_out'] and tx['value_in'] as side effects.  Also links
    any previously seen "unlinked" inputs that spend this transaction's
    outputs (inputs imported before their funding transaction).
    """
    tx_id = store.new_id("tx")
    dbhash = store.hashin(tx['hash'])
    store.sql("""
        INSERT INTO tx (tx_id, tx_hash, tx_version, tx_lockTime, tx_size)
        VALUES (?, ?, ?, ?, ?)""",
              (tx_id, dbhash, store.intin(tx['version']),
               store.intin(tx['lockTime']), len(tx['tx'])))

    # Import transaction outputs.
    tx['value_out'] = 0
    for pos in xrange(len(tx['txOut'])):
        txout = tx['txOut'][pos]
        tx['value_out'] += txout['value']
        txout_id = store.new_id("txout")

        # Identify the payee when the script matches a standard
        # pay-to-address or pay-to-pubkey form.
        pubkey_id = None
        match = SCRIPT_ADDRESS_RE.match(txout['scriptPubKey'])
        if match:
            pubkey_id = store.pubkey_hash_to_id(match.group(1))
        else:
            match = SCRIPT_PUBKEY_RE.match(txout['scriptPubKey'])
            if match:
                pubkey_id = store.pubkey_to_id(match.group(1))

        store.sql("""
            INSERT INTO txout (
                txout_id, tx_id, txout_pos, txout_value,
                txout_scriptPubKey, pubkey_id
            ) VALUES (?, ?, ?, ?, ?, ?)""",
                  (txout_id, tx_id, pos, store.intin(txout['value']),
                   store.binin(txout['scriptPubKey']), pubkey_id))
        # Resolve inputs imported earlier that spend this output.
        for row in store.selectall("""
            SELECT txin_id
              FROM unlinked_txin
             WHERE txout_tx_hash = ?
               AND txout_pos = ?""", (dbhash, pos)):
            (txin_id,) = row
            store.sql("UPDATE txin SET txout_id = ? WHERE txin_id = ?",
                      (txout_id, txin_id))
            store.sql("DELETE FROM unlinked_txin WHERE txin_id = ?",
                      (txin_id,))

    # Import transaction inputs.
    tx['value_in'] = 0
    for pos in xrange(len(tx['txIn'])):
        txin = tx['txIn'][pos]
        txin_id = store.new_id("txin")

        if is_coinbase:
            txout_id = None
        else:
            txout_id, value = store.lookup_txout(
                txin['prevout_hash'], txin['prevout_n'])
            tx['value_in'] += value

        store.sql("""
            INSERT INTO txin (
                txin_id, tx_id, txin_pos, txout_id,
                txin_scriptSig, txin_sequence
            ) VALUES (?, ?, ?, ?, ?, ?)""",
                  (txin_id, tx_id, pos, txout_id,
                   store.binin(txin['scriptSig']),
                   store.intin(txin['sequence'])))
        if not is_coinbase and txout_id is None:
            # Funding transaction not seen yet; remember the reference
            # so it can be linked when that transaction arrives.
            store.sql("""
                INSERT INTO unlinked_txin (
                    txin_id, txout_tx_hash, txout_pos
                ) VALUES (?, ?, ?)""",
                      (txin_id, store.hashin(txin['prevout_hash']),
                       store.intin(txin['prevout_n'])))

    # XXX Could populate PUBKEY.PUBKEY with txin scripts...
    # or leave that to an offline process.  Nothing in this program
    # requires them.
    return tx_id
  1633.  
# Called to indicate that the given block has the correct magic
# number and policy for the given chain.  Updates CHAIN_CANDIDATE
# and CHAIN.CHAIN_LAST_BLOCK_ID as appropriate.
def offer_block_to_chain(store, b, chain_id):
    """Attach block b to chain_id, reorganizing the chain if b's branch
    now carries the most work."""
    if b['chain_work'] is None:
        # Unknown cumulative work (orphan branch): cannot be longest.
        in_longest = 0
    else:
        # Do we produce a chain longer than the current chain?
        # Query whether the new block (or its tallest descendant)
        # beats the current chain_last_block_id.  Also check
        # whether the current best is our top, which indicates
        # this block is in longest; this can happen in database
        # repair scenarios.
        row = store.selectrow("""
            SELECT b.block_id, b.block_height, b.block_chain_work
              FROM block b, chain c
             WHERE c.chain_id = ?
               AND b.block_id = c.chain_last_block_id""", (chain_id,))
        if row:
            loser_id, loser_height, loser_work = row
            if loser_id <> b['top']['block_id'] and \
                    store.binout_int(loser_work) >= b['top']['chain_work']:
                # Current tip still has at least as much work: no reorg.
                row = None
        if row:
            # New longest chain: walk both branches back to their common
            # ancestor, collecting blocks to disconnect and to connect.
            in_longest = 1
            to_connect = []
            to_disconnect = []
            winner_id = b['top']['block_id']
            winner_height = b['top']['height']
            # Bring both walks to the same height first.
            while loser_height > winner_height:
                to_disconnect.insert(0, loser_id)
                loser_id = store.get_prev_block_id(loser_id)
                loser_height -= 1
            while winner_height > loser_height:
                to_connect.insert(0, winner_id)
                winner_id = store.get_prev_block_id(winner_id)
                winner_height -= 1
            # Walk both in lockstep until they meet at the fork point.
            loser_height = None
            while loser_id <> winner_id:
                to_disconnect.insert(0, loser_id)
                loser_id = store.get_prev_block_id(loser_id)
                to_connect.insert(0, winner_id)
                winner_id = store.get_prev_block_id(winner_id)
                winner_height -= 1
            for block_id in to_disconnect:
                store.disconnect_block(block_id, chain_id)
            for block_id in to_connect:
                store.connect_block(block_id, chain_id)

        elif b['hashPrev'] == GENESIS_HASH_PREV:
            in_longest = 1  # Assume only one genesis block per chain. XXX
        else:
            in_longest = 0

    store.sql("""
        INSERT INTO chain_candidate (
            chain_id, block_id, in_longest, block_height
        ) VALUES (?, ?, ?, ?)""",
              (chain_id, b['block_id'], in_longest, b['height']))
    if in_longest == 1:
        store._add_block_chain(b['block_id'], chain_id)

    if in_longest > 0:
        store.sql("""
            UPDATE chain
               SET chain_last_block_id = ?
             WHERE chain_id = ?""", (b['top']['block_id'], chain_id))
  1704.  
  1705. def find_next_blocks(store, block_id):
  1706. ret = []
  1707. for row in store.selectall(
  1708. "SELECT next_block_id FROM block_next WHERE block_id = ?",
  1709. (block_id,)):
  1710. ret.append(row[0])
  1711. return ret
  1712.  
  1713. def get_prev_block_id(store, block_id):
  1714. return store.selectrow(
  1715. "SELECT prev_block_id FROM block WHERE block_id = ?",
  1716. (block_id,))[0]
  1717.  
def disconnect_block(store, block_id, chain_id):
    """Remove a block from chain_id's longest chain (database and cache)."""
    store.sql("""
        UPDATE chain_candidate
           SET in_longest = 0
         WHERE block_id = ? AND chain_id = ?""",
              (block_id, chain_id))
    store._remove_block_chain(block_id, chain_id)
  1726.  
def connect_block(store, block_id, chain_id):
    """Add a block to chain_id's longest chain (database and cache)."""
    store.sql("""
        UPDATE chain_candidate
           SET in_longest = 1
         WHERE block_id = ? AND chain_id = ?""",
              (block_id, chain_id))
    store._add_block_chain(block_id, chain_id)
  1735.  
def lookup_txout(store, tx_hash, txout_pos):
    """Find a transaction output by funding-tx hash and position.

    Returns (txout_id, value), or (None, 0) when the funding
    transaction has not been imported yet (an "unlinked" input).
    """
    row = store.selectrow("""
        SELECT txout.txout_id, txout.txout_value
          FROM txout, tx
         WHERE txout.tx_id = tx.tx_id
           AND tx.tx_hash = ?
           AND txout.txout_pos = ?""",
              (store.hashin(tx_hash), txout_pos))
    return (None, 0) if row is None else (row[0], int(row[1]))
  1745.  
def pubkey_hash_to_id(store, pubkey_hash):
    """Return (creating if needed) the pubkey_id for a 160-bit key hash."""
    return store._pubkey_id(pubkey_hash, None)
  1748.  
  1749. def pubkey_to_id(store, pubkey):
  1750. pubkey_hash = util.pubkey_to_hash(pubkey)
  1751. return store._pubkey_id(pubkey_hash, pubkey)
  1752.  
def _pubkey_id(store, pubkey_hash, pubkey):
    """Look up or insert a pubkey row by its hash; return pubkey_id.

    pubkey may be None when only the hash is known (pay-to-address).
    """
    dbhash = store.binin(pubkey_hash)  # binin, not hashin for 160-bit
    row = store.selectrow("""
        SELECT pubkey_id
          FROM pubkey
         WHERE pubkey_hash = ?""", (dbhash,))
    if row:
        return row[0]
    pubkey_id = store.new_id("pubkey")
    store.sql("""
        INSERT INTO pubkey (pubkey_id, pubkey_hash, pubkey)
        VALUES (?, ?, ?)""",
              (pubkey_id, dbhash, store.binin(pubkey)))
    return pubkey_id
  1767.  
def catch_up(store):
    """Load new blocks from every configured data directory.

    A failure in one datadir is reported and rolled back without
    aborting the remaining datadirs.
    """
    for dircfg in store.datadirs:
        try:
            store.catch_up_dir(dircfg)
        except Exception, e:
            import traceback
            traceback.print_exc()
            # NOTE(review): the trailing dircfg also prints the whole
            # datadir config dict -- possibly leftover debugging.
            print ("Warning: failed to catch up %s: %s"
                   % (dircfg['dirname'], str(e))), dircfg
            store.rollback()
  1778.  
# Load all blocks starting at the current file and offset.
def catch_up_dir(store, dircfg):
    """Import blocks from dircfg's current block file and any later ones."""
    def open_blkfile():
        # Re-read the datadir config in case it changed, then memory-map
        # the current block file.
        store._refresh_dircfg(dircfg)
        filename = store.blkfile_name(dircfg)
        ds = BCDataStream.BCDataStream()
        ds.map_file(open(filename, "rb"), 0)
        return ds

    try:
        ds = open_blkfile()
    except IOError, e:
        print "Skipping datadir " + dircfg['dirname'] + ": " + str(e)
        return

    while (True):
        store.import_blkdat(dircfg, ds)

        # Try another file.
        dircfg['blkfile_number'] += 1
        try:
            ds = open_blkfile()
        except IOError:
            # No more block files.
            dircfg['blkfile_number'] -= 1
            return

        # Start at the beginning of the newly opened file.
        dircfg['blkfile_offset'] = 0
  1807.  
  1808. # Load all blocks from the given data stream.
  1809. def import_blkdat(store, dircfg, ds):
  1810. filenum = dircfg['blkfile_number']
  1811. ds.read_cursor = dircfg['blkfile_offset']
  1812. bytes_done = 0
  1813.  
  1814. while filenum == dircfg['blkfile_number']:
  1815. if ds.read_cursor + 8 > len(ds.input):
  1816. break
  1817.  
  1818. offset = ds.read_cursor
  1819. magic = ds.read_bytes(4)
  1820.  
  1821. # Assume blocks obey the respective policy if they get here.
  1822. chain_id = dircfg['chain_id']
  1823. if chain_id is not None:
  1824. pass
  1825. elif magic == BITCOIN_MAGIC:
  1826. chain_id = BITCOIN_CHAIN_ID
  1827. elif magic == TESTNET_MAGIC:
  1828. chain_id = TESTNET_CHAIN_ID
  1829. elif magic == NAMECOIN_MAGIC:
  1830. chain_id = NAMECOIN_CHAIN_ID
  1831. elif magic == WEEDS_MAGIC:
  1832. chain_id = WEEDS_CHAIN_ID
  1833. elif magic == BEER_MAGIC:
  1834. chain_id = BEER_CHAIN_ID
  1835. elif magic == SLC_MAGIC:
  1836. chain_id = SLC_CHAIN_ID
  1837. elif magic == SLC_MAGIC:
  1838. chain_id = SLC_CHAIN_ID
  1839. elif magic == LC_MAGIC:
  1840. chain_id = LC_CHAIN_ID
  1841. else:
  1842. filename = store.blkfile_name(dircfg)
  1843. print "chain not found for magic", repr(magic), \
  1844. "in block file", filename, "at offset", offset
  1845. print ("If file contents have changed, consider forcing a"
  1846. " rescan: UPDATE datadir SET blkfile_offset=0"
  1847. " WHERE dirname='%s'" % (dircfg['dirname'],))
  1848. ds.read_cursor = offset
  1849. break
  1850.  
  1851. length = ds.read_int32()
  1852. if ds.read_cursor + length > len(ds.input):
  1853. print "incomplete block of length", length
  1854. ds.read_cursor = offset
  1855. break
  1856.  
  1857. hash = util.double_sha256(
  1858. ds.input[ds.read_cursor : ds.read_cursor + 80])
  1859. # XXX should decode target and check hash against it to avoid
  1860. # loading garbage data.
  1861.  
  1862. chain_ids = set()
  1863. block_row = store.selectrow("""
  1864. SELECT block_id, block_height, block_chain_work,
  1865. block_nTime, block_total_seconds,
  1866. block_total_satoshis, block_satoshi_seconds
  1867. FROM block
  1868. WHERE block_hash = ?
  1869. """, (store.hashin(hash),))
  1870.  
  1871. if block_row:
  1872. # Block header already seen. Don't import the block,
  1873. # but try to add it to a chain.
  1874. # XXX Could rescan transactions in case we loaded an
  1875. # incomplete block or if operating under --rescan.
  1876. ds.read_cursor += length
  1877. else:
  1878. b = deserialize.parse_Block(ds)
  1879. b["hash"] = hash
  1880. store.import_block(b)
  1881.  
  1882. if chain_id is not None:
  1883.  
  1884. if block_row:
  1885. b = {
  1886. "block_id": block_row[0],
  1887. "height": block_row[1],
  1888. "chain_work": store.binout_int(block_row[2]),
  1889. "nTime": block_row[3],
  1890. "seconds": block_row[4],
  1891. "satoshis": block_row[5],
  1892. "ss": block_row[6]}
  1893. if store.selectrow("""
  1894. SELECT 1
  1895. FROM chain_candidate
  1896. WHERE block_id = ?
  1897. AND chain_id = ?""",
  1898. (b['block_id'], chain_id)):
  1899. print "block", b['block_id'], \
  1900. "already in chain", chain_id
  1901. b = None
  1902. else:
  1903. if b['height'] == 0:
  1904. b['hashPrev'] = GENESIS_HASH_PREV
  1905. else:
  1906. b['hashPrev'] = 'dummy' # Fool adopt_orphans.
  1907. b['top'], new_work = store.adopt_orphans(b, 0)
  1908.  
  1909. if b:
  1910. store.offer_block_to_chain(b, chain_id)
  1911.  
  1912. bytes_done += length
  1913. if bytes_done > store.commit_bytes :
  1914. print "commit"
  1915. store.save_blkfile_offset(dircfg, ds.read_cursor)
  1916. store.commit()
  1917. store._refresh_dircfg(dircfg)
  1918. bytes_done = 0
  1919.  
  1920. if bytes_done > 0:
  1921. store.save_blkfile_offset(dircfg, ds.read_cursor)
  1922. store.commit()
  1923.  
  1924. def blkfile_name(store, dircfg):
  1925. return os.path.join(dircfg['dirname'], "blk%04d.dat"
  1926. % (dircfg['blkfile_number'],))
  1927.  
  1928. def save_blkfile_offset(store, dircfg, offset):
  1929. store.sql("""
  1930. UPDATE datadir
  1931. SET blkfile_number = ?,
  1932. blkfile_offset = ?
  1933. WHERE datadir_id = ?""",
  1934. (dircfg['blkfile_number'], store.intin(offset),
  1935. dircfg['id']))
  1936. if store.cursor.rowcount == 0:
  1937. store.sql("""
  1938. INSERT INTO datadir (datadir_id, dirname, blkfile_number,
  1939. blkfile_offset, chain_id)
  1940. VALUES (?, ?, ?, ?, ?)""",
  1941. (dircfg['id'], dircfg['dirname'],
  1942. dircfg['blkfile_number'],
  1943. store.intin(offset), dircfg['chain_id']))
  1944. dircfg['blkfile_offset'] = offset
  1945.  
  1946. def _refresh_dircfg(store, dircfg):
  1947. row = store.selectrow("""
  1948. SELECT blkfile_number, blkfile_offset
  1949. FROM datadir
  1950. WHERE dirname = ?""", (dircfg['dirname'],))
  1951. if row:
  1952. number, offset = map(int, row)
  1953. if (number > dircfg['blkfile_number'] or
  1954. (number == dircfg['blkfile_number'] and
  1955. offset > dircfg['blkfile_offset'])):
  1956. print "skipped to", dircfg['dirname'], number, offset
  1957. dircfg['blkfile_number'] = number
  1958. dircfg['blkfile_offset'] = offset
  1959.  
  1960. def new(args):
  1961. return DataStore(args)
  1962.  
  1963.  
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement