import time, threading
-
class AbeStore(Datastore_class):
def __init__(self, config):
elif args.dbtype == 'psycopg2':
args.connect_args = { 'database' : config.get('database','database') }
+ coin = config.get('server', 'coin')
+ self.addrtype = 0
+ if coin == 'litecoin':
+ print 'Litecoin settings:'
+ datadir = config.get('server','datadir')
+ print ' datadir = ' + datadir
+ args.datadir = [{"dirname":datadir,"chain":"Litecoin","code3":"LTC","address_version":"\u0030"}]
+ print ' addrtype = 48'
+ self.addrtype = 48
+
Datastore_class.__init__(self,args)
+ # Use 1 (Bitcoin) if chain_id is not set
+ self.chain_id = self.datadirs[0]["chain_id"] or 1
+ print 'Coin chain_id = %d' % self.chain_id
+
self.sql_limit = int( config.get('database','limit') )
self.tx_cache = {}
self.dblock = thread.allocate_lock()
self.last_tx_id = 0
+ self.known_mempool_hashes = []
def import_tx(self, tx, is_coinbase):
#print "WARNING: missing tx_in for tx", txid
continue
- address = hash_to_address(chr(0), _hash)
+ address = hash_to_address(chr(self.addrtype), _hash)
if self.tx_cache.has_key(address):
print "cache: invalidating", address
self.tx_cache.pop(address)
#print "WARNING: missing tx_out for tx", txid
continue
- address = hash_to_address(chr(0), _hash)
+ address = hash_to_address(chr(self.addrtype), _hash)
if self.tx_cache.has_key(address):
print "cache: invalidating", address
self.tx_cache.pop(address)
JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
WHERE pubkey.pubkey_hash = ?
+ AND cc.chain_id = ?
AND cc.in_longest = 1
- LIMIT ? """, (dbhash,self.sql_limit))
+ LIMIT ? """, (dbhash, self.chain_id, self.sql_limit))
if len(out)==self.sql_limit:
raise BaseException('limit reached')
JOIN txout ON (txout.tx_id = tx.tx_id)
JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
WHERE pubkey.pubkey_hash = ?
+ AND cc.chain_id = ?
AND cc.in_longest = 1
- LIMIT ? """, (dbhash,self.sql_limit))
+ LIMIT ? """, (dbhash, self.chain_id, self.sql_limit))
if len(out)==self.sql_limit:
raise BaseException('limit reached')
continue
# discard transactions that are too old
- if self.last_tx_id - tx_id > 10000:
+ if self.last_tx_id - tx_id > 50000:
print "discarding tx id", tx_id
continue
if not _hash:
#print "WARNING: missing tx_in for tx", tx_id, addr
continue
- address = hash_to_address(chr(0), _hash)
+ address = hash_to_address(chr(self.addrtype), _hash)
txinputs.append(address)
txpoint['inputs'] = txinputs
txoutputs = []
if not _hash:
#print "WARNING: missing tx_out for tx", tx_id, addr
continue
- address = hash_to_address(chr(0), _hash)
+ address = hash_to_address(chr(self.addrtype), _hash)
txoutputs.append(address)
txpoint['outputs'] = txoutputs
return status
+ # Fetch the header of the block at `block_height` on the best (longest) chain.
+ # Reads Abe's chain_summary table and returns a dict of header fields;
+ # raises BaseException("block not found") when no in_longest row exists
+ # at that height.
+ # NOTE(review): block_height is interpolated via %d, so it must already be
+ # an int -- confirm callers validate it before it reaches this SQL string.
+ def get_block_header(self, block_height):
+ out = self.safe_sql("""
+ SELECT
+ block_hash,
+ block_version,
+ block_hashMerkleRoot,
+ block_nTime,
+ block_nBits,
+ block_nNonce,
+ block_height,
+ prev_block_hash,
+ block_id
+ FROM chain_summary
+ WHERE block_height = %d AND in_longest = 1"""%block_height)
+
+ if not out: raise BaseException("block not found")
+ row = out[0]
+ # Decode the raw row: hash columns are converted to hex via hashout_hex,
+ # numeric columns coerced to int.
+ (block_hash, block_version, hashMerkleRoot, nTime, nBits, nNonce, height,prev_block_hash, block_id) \
+ = ( self.hashout_hex(row[0]), int(row[1]), self.hashout_hex(row[2]), int(row[3]), int(row[4]), int(row[5]), int(row[6]), self.hashout_hex(row[7]), int(row[8]) )
+
+ # block_hash, height and block_id are decoded above but intentionally not
+ # included in the returned header dict.
+ out = {"block_height":block_height, "version":block_version, "prev_block_hash":prev_block_hash,
+ "merkle_root":hashMerkleRoot, "timestamp":nTime, "bits":nBits, "nonce":nNonce}
+ return out
+
+
+ # Compute the merkle branch for `tx_hash` within its block on the longest
+ # chain. Returns {"block_height": h, "merkle": s} where s lists the sibling
+ # hash at each tree level, prefixed "L"/"R" for which side the sibling is on.
+ # NOTE(review): tx_hash is spliced into the SQL with %s rather than a bound
+ # parameter; if it can arrive from an untrusted client this is an injection
+ # risk -- consider the ?-placeholder form used for block_id below.
+ def get_tx_merkle(self, tx_hash):
+
+ out = self.safe_sql("""
+ SELECT block_tx.block_id FROM tx
+ JOIN block_tx on tx.tx_id = block_tx.tx_id
+ JOIN chain_summary on chain_summary.block_id = block_tx.block_id
+ WHERE tx_hash='%s' AND in_longest = 1"""%tx_hash)
+ # out[0] is a 1-tuple row; "%d" % (id,) below formats it as the single value.
+ block_id = out[0]
+
+ # get block height
+ out = self.safe_sql("SELECT block_height FROM chain_summary WHERE block_id = %d AND in_longest = 1"%block_id)
+
+ if not out: raise BaseException("block not found")
+ block_height = int(out[0][0])
+
+ merkle = []
+ # list all tx in block
+ for row in self.safe_sql("""
+ SELECT DISTINCT tx_id, tx_pos, tx_hash
+ FROM txin_detail
+ WHERE block_id = ?
+ ORDER BY tx_pos""", (block_id,)):
+ tx_id, tx_pos, tx_h = row
+ merkle.append(tx_h)
+
+ # find subset.
+ # TODO: do not compute this on client request, better store the hash tree of each block in a database...
+ import hashlib
+ # Hex txids are displayed byte-reversed relative to the raw digest, hence
+ # the [::-1] in both directions. Hash is Bitcoin's double SHA-256.
+ encode = lambda x: x[::-1].encode('hex')
+ decode = lambda x: x.decode('hex')[::-1]
+ Hash = lambda x: hashlib.sha256(hashlib.sha256(x).digest()).digest()
+
+ merkle = map(decode, merkle)
+ target_hash = decode(tx_hash)
+
+ s = []
+ # Walk up the tree level by level; at each level record the sibling of the
+ # node that currently contains target_hash, then replace target_hash by the
+ # combined parent hash.
+ while len(merkle) != 1:
+ # odd node count: duplicate the last hash (Bitcoin merkle convention)
+ if len(merkle)%2: merkle.append( merkle[-1] )
+ n = []
+ while merkle:
+ new_hash = Hash( merkle[0] + merkle[1] )
+ if merkle[0] == target_hash:
+ s.append( "L" + encode(merkle[1]))
+ target_hash = new_hash
+ elif merkle[1] == target_hash:
+ s.append( "R" + encode(merkle[0]))
+ target_hash = new_hash
+ n.append( new_hash )
+ merkle = merkle[2:]
+ merkle = n
+
+ # send result
+ return {"block_height":block_height,"merkle":s}
+
+
+
+ # Poll bitcoind for the current mempool and import any transactions not yet
+ # seen. This hunk replaces the old single "getmemorypool" call (which
+ # returned full hex transactions) with "getrawmempool" plus one
+ # "getrawtransaction" per previously-unseen hash, tracked in
+ # store.known_mempool_hashes to avoid refetching on every poll.
def memorypool_update(store):
ds = BCDataStream.BCDataStream()
- postdata = dumps({"method": 'getmemorypool', 'params': [], 'id':'jsonrpc'})
-
+ postdata = dumps({"method": 'getrawmempool', 'params': [], 'id':'jsonrpc'})
respdata = urllib.urlopen(store.bitcoind_url, postdata).read()
r = loads(respdata)
if r['error'] != None:
+ print r['error']
return
- v = r['result'].get('transactions')
- for hextx in v:
+ mempool_hashes = r.get('result')
+ for tx_hash in mempool_hashes:
+
+ # skip hashes already fetched on a previous poll
+ if tx_hash in store.known_mempool_hashes: continue
+ store.known_mempool_hashes.append(tx_hash)
+
+ postdata = dumps({"method": 'getrawtransaction', 'params': [tx_hash], 'id':'jsonrpc'})
+ respdata = urllib.urlopen(store.bitcoind_url, postdata).read()
+ r = loads(respdata)
+ if r['error'] != None:
+ # tx may have left the mempool between the two RPC calls; best-effort skip
+ continue
+ hextx = r.get('result')
ds.clear()
ds.write(hextx.decode('hex'))
tx = deserialize.parse_Transaction(ds)
tx['hash'] = util.double_sha256(tx['tx'])
- tx_hash = store.hashin(tx['hash'])
-
+
if store.tx_find_id_and_value(tx):
pass
else:
tx_id = store.import_tx(tx, False)
store.update_tx_cache(tx_id)
#print tx_hash
-
+
store.commit()
+ # remember this snapshot: hashes that left the mempool are dropped, so a
+ # returning tx would be refetched (harmless -- tx_find_id_and_value dedupes)
+ store.known_mempool_hashes = mempool_hashes
def send_tx(self,tx):
- postdata = dumps({"method": 'importtransaction', 'params': [tx], 'id':'jsonrpc'})
+ postdata = dumps({"method": 'sendrawtransaction', 'params': [tx], 'id':'jsonrpc'})
respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
r = loads(respdata)
if r['error'] != None:
with store.dblock:
store.catch_up()
store.memorypool_update()
- block_number = store.get_block_number(1)
+ block_number = store.get_block_number(store.chain_id)
return block_number
self.store = AbeStore(config)
self.block_number = -1
self.watched_addresses = []
+
+ # catch_up first
+ n = self.store.main_iteration()
+ print "blockchain: %d blocks"%n
+
threading.Timer(10, self.run_store_iteration).start()
def process(self, request):
error = str(e) + ': ' + address
print "error:", error
+ elif method == 'blockchain.block.get_header':
+ try:
+ height = params[0]
+ result = self.store.get_block_header( height )
+ except BaseException, e:
+ error = str(e) + ': %d'% height
+ print "error:", error
+
elif method == 'blockchain.transaction.broadcast':
txo = self.store.send_tx(params[0])
print "sent tx:", txo
result = txo
+ elif method == 'blockchain.transaction.get_merkle':
+ try:
+ tx_hash = params[0]
+ result = self.store.get_tx_merkle(tx_hash )
+ except BaseException, e:
+ error = str(e) + ': ' + tx_hash
+ print "error:", error
+
else:
error = "unknown method:%s"%method
if self.block_number != block_number:
self.block_number = block_number
print "block number:", self.block_number
- self.push_response({ 'method':'blockchain.numblocks.subscribe', 'params':[self.block_number] })
+ self.push_response({ 'id': None, 'method':'blockchain.numblocks.subscribe', 'params':[self.block_number] })
while True:
try:
break
if addr in self.watched_addresses:
status = self.store.get_status( addr )
- self.push_response({ 'method':'blockchain.address.subscribe', 'params':[addr, status] })
+ self.push_response({ 'id': None, 'method':'blockchain.address.subscribe', 'params':[addr, status] })
threading.Timer(10, self.run_store_iteration).start()