from Abe.util import hash_to_address, decode_check_address
from Abe.DataStore import DataStore as Datastore_class
from Abe import DataStore, readconf, BCDataStream, deserialize, util, base58

import hashlib
import thread, traceback, sys, urllib, operator
from json import dumps, loads
from Queue import Queue
import time, threading
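
# Helpers for converting between the internal little-endian byte strings and
# the hex strings used in the Electrum protocol, plus double-SHA256 ("Hash")
# as used for block and transaction hashes.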
encode = lambda x: x[::-1].encode('hex')
decode = lambda x: x.decode('hex')[::-1]
Hash = lambda x: hashlib.sha256(hashlib.sha256(x).digest()).digest()


def rev_hex(s):
    return s.decode('hex')[::-1].encode('hex')

def int_to_hex(i, length=1):
    s = hex(i)[2:].rstrip('L')
    s = "0"*(2*length - len(s)) + s
    return rev_hex(s)
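
# Serialize a block header dict back into the 80-byte wire format
# (version, prev hash, merkle root, timestamp, bits, nonce) as a hex string.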
def header_to_string(res):
    s = int_to_hex(res.get('version'), 4) \
        + rev_hex(res.get('prev_block_hash')) \
        + rev_hex(res.get('merkle_root')) \
        + int_to_hex(int(res.get('timestamp')), 4) \
        + int_to_hex(int(res.get('bits')), 4) \
        + int_to_hex(int(res.get('nonce')), 4)
    return s
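
# AbeStore wraps an Abe DataStore and adds what is needed to serve Electrum
# clients: per-address history, block headers and header chunks, and a view
# of bitcoind's memory pool.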
class AbeStore(Datastore_class):

    def __init__(self, config):
        conf = DataStore.CONFIG_DEFAULTS
        args, argv = readconf.parse_argv([], conf)

        args.dbtype = config.get('database', 'type')
        if args.dbtype == 'sqlite3':
            args.connect_args = {'database': config.get('database', 'database')}
        elif args.dbtype == 'MySQLdb':
            args.connect_args = {'db': config.get('database', 'database'),
                                 'user': config.get('database', 'username'),
                                 'passwd': config.get('database', 'password')}
        elif args.dbtype == 'psycopg2':
            args.connect_args = {'database': config.get('database', 'database')}
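
        # Illustrative layout of the config sections read here and below.
        # Key names match the config.get() calls; the values are examples only
        # ([database] username/password are additionally read for MySQLdb):
        #
        #   [database]
        #   type = psycopg2
        #   database = abe
        #   limit = 200
        #
        #   [server]
        #   coin = bitcoin
        #
        #   [bitcoind]
        #   user = rpcuser
        #   password = rpcpassword
        #   host = 127.0.0.1
        #   port = 8332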
        coin = config.get('server', 'coin')
        self.addrtype = 0

        if coin == 'litecoin':
            print 'Litecoin settings:'
            datadir = config.get('server', 'datadir')
            print ' datadir = ' + datadir
            # address version byte 48 (0x30) for Litecoin pubkey addresses
            args.datadir = [{"dirname": datadir, "chain": "Litecoin", "code3": "LTC", "address_version": "\x30"}]
            print ' addrtype = 48'
            self.addrtype = 48

        Datastore_class.__init__(self, args)

        # Use 1 (Bitcoin) if chain_id is not set
        self.chain_id = self.datadirs[0]["chain_id"] or 1
        print 'Coin chain_id = %d' % self.chain_id

        self.sql_limit = int(config.get('database', 'limit'))

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (
            config.get('bitcoind', 'user'), config.get('bitcoind', 'password'),
            config.get('bitcoind', 'host'), config.get('bitcoind', 'port'))

        self.tx_cache = {}
        self.chunk_cache = {}
        self.address_queue = Queue()

        self.lock = threading.Lock()        # for the database
        self.cache_lock = threading.Lock()  # for the cache

        self.last_tx_id = 0
        self.known_mempool_hashes = []
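
    # import_tx/import_block are overridden so that every transaction that
    # enters the database also invalidates the cached histories of the
    # addresses it touches.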
    def import_tx(self, tx, is_coinbase):
        tx_id = super(AbeStore, self).import_tx(tx, is_coinbase)
        self.last_tx_id = tx_id
        return tx_id

    def import_block(self, b, chain_ids=frozenset()):
        block_id = super(AbeStore, self).import_block(b, chain_ids)
        for pos in xrange(len(b['transactions'])):
            tx = b['transactions'][pos]
            if not tx.has_key('hash'):
                tx['hash'] = util.double_sha256(tx['tx'])
            tx_id = self.tx_find_id_and_value(tx)
            if tx_id:
                self.update_tx_cache(tx_id)
            else:
                print "error: import_block: no tx_id"
        return block_id
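
    # Invalidate the cached history of every address that appears in the
    # inputs or outputs of a transaction, and queue it for client notification.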
    def update_tx_cache(self, txid):
        inrows = self.get_tx_inputs(txid, False)
        for row in inrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print "WARNING: missing tx_in for tx", txid
                continue
            address = hash_to_address(chr(self.addrtype), _hash)
            with self.cache_lock:
                if self.tx_cache.has_key(address):
                    print "cache: invalidating", address
                    self.tx_cache.pop(address)
            self.address_queue.put(address)

        outrows = self.get_tx_outputs(txid, False)
        for row in outrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print "WARNING: missing tx_out for tx", txid
                continue
            address = hash_to_address(chr(self.addrtype), _hash)
            with self.cache_lock:
                if self.tx_cache.has_key(address):
                    print "cache: invalidating", address
                    self.tx_cache.pop(address)
            self.address_queue.put(address)
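
    # All SQL goes through safe_sql(): it serializes access to the DataStore
    # connection and converts driver errors into a single exception type.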
    def safe_sql(self, sql, params=(), lock=True):
        error = False
        try:
            if lock: self.lock.acquire()
            ret = self.selectall(sql, params)
        except:
            error = True
            traceback.print_exc(file=sys.stdout)
        finally:
            if lock: self.lock.release()

        if error:
            raise BaseException('sql error')
        return ret
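
    # Per-transaction detail queries.  get_tx_outputs rows are
    #   (pos, scriptPubKey, value, spending tx hash, spending tx id, spending txin pos, pubkey hash)
    # and get_tx_inputs rows are
    #   (pos, scriptSig, value, previous tx hash, previous tx id, previous txout pos, pubkey hash).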
    def get_tx_outputs(self, tx_id, lock=True):
        return self.safe_sql("""SELECT
                txout.txout_pos,
                txout.txout_scriptPubKey,
                txout.txout_value,
                nexttx.tx_hash,
                nexttx.tx_id,
                txin.txin_pos,
                pubkey.pubkey_hash
              FROM txout
              LEFT JOIN txin ON (txin.txout_id = txout.txout_id)
              LEFT JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
              LEFT JOIN tx nexttx ON (txin.tx_id = nexttx.tx_id)
             WHERE txout.tx_id = %d
             ORDER BY txout.txout_pos
             """ % (tx_id,), (), lock)

    def get_tx_inputs(self, tx_id, lock=True):
        return self.safe_sql("""SELECT
                txin.txin_pos,
                txin.txin_scriptSig,
                txout.txout_value,
                COALESCE(prevtx.tx_hash, u.txout_tx_hash),
                prevtx.tx_id,
                COALESCE(txout.txout_pos, u.txout_pos),
                pubkey.pubkey_hash
              FROM txin
              LEFT JOIN txout ON (txout.txout_id = txin.txout_id)
              LEFT JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
              LEFT JOIN tx prevtx ON (txout.tx_id = prevtx.tx_id)
              LEFT JOIN unlinked_txin u ON (u.txin_id = txin.txin_id)
             WHERE txin.tx_id = %d
             ORDER BY txin.txin_pos
             """ % (tx_id,), (), lock)
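
    # Address history queries.  The confirmed variants return rows of
    #   (nTime, chain_id, height, is_input, block_hash, tx_hash, tx_id, pos, value)
    # from the longest chain; the *_memorypool variants return
    #   (is_input, tx_hash, tx_id, pos, value)
    # for transactions that are in the database but not yet in a block.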
    def get_address_out_rows(self, dbhash):
        out = self.safe_sql("""SELECT
                b.block_nTime,
                cc.chain_id,
                cc.block_height,
                1,
                b.block_hash,
                tx.tx_hash,
                tx.tx_id,
                txin.txin_pos,
                -prevout.txout_value
              FROM chain_candidate cc
              JOIN block b ON (b.block_id = cc.block_id)
              JOIN block_tx ON (block_tx.block_id = b.block_id)
              JOIN tx ON (tx.tx_id = block_tx.tx_id)
              JOIN txin ON (txin.tx_id = tx.tx_id)
              JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
              JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
               AND cc.chain_id = ?
               AND cc.in_longest = 1
             LIMIT ? """, (dbhash, self.chain_id, self.sql_limit))

        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_out_rows_memorypool(self, dbhash):
        out = self.safe_sql("""SELECT
                1,
                tx.tx_hash,
                tx.tx_id,
                txin.txin_pos,
                -prevout.txout_value
              FROM tx
              JOIN txin ON (txin.tx_id = tx.tx_id)
              JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
              JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
             LIMIT ? """, (dbhash, self.sql_limit))
        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_in_rows(self, dbhash):
        out = self.safe_sql("""SELECT
                b.block_nTime,
                cc.chain_id,
                cc.block_height,
                0,
                b.block_hash,
                tx.tx_hash,
                tx.tx_id,
                txout.txout_pos,
                txout.txout_value
              FROM chain_candidate cc
              JOIN block b ON (b.block_id = cc.block_id)
              JOIN block_tx ON (block_tx.block_id = b.block_id)
              JOIN tx ON (tx.tx_id = block_tx.tx_id)
              JOIN txout ON (txout.tx_id = tx.tx_id)
              JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
               AND cc.chain_id = ?
               AND cc.in_longest = 1
             LIMIT ? """, (dbhash, self.chain_id, self.sql_limit))

        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_in_rows_memorypool(self, dbhash):
        out = self.safe_sql("""SELECT
                0,
                tx.tx_hash,
                tx.tx_id,
                txout.txout_pos,
                txout.txout_value
              FROM tx
              JOIN txout ON (txout.tx_id = tx.tx_id)
              JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
             LIMIT ? """, (dbhash, self.sql_limit))
        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out
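
    # get_history builds the list of "txpoints" for an address: one dict per
    # confirmed or mempool row, later enriched with the input and output
    # addresses of each transaction.  Results are cached per address;
    # cache_only=True lets the dispatcher answer from cache without SQL.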
    def get_history(self, addr, cache_only=False):
        with self.cache_lock:
            cached_version = self.tx_cache.get(addr)
        if cached_version is not None:
            return cached_version

        if cache_only: return -1

        version, binaddr = decode_check_address(addr)
        if binaddr is None:
            return None

        dbhash = self.binin(binaddr)
        rows = []
        rows += self.get_address_out_rows(dbhash)
        rows += self.get_address_in_rows(dbhash)

        txpoints = []
        known_tx = []

        for row in rows:
            try:
                nTime, chain_id, height, is_in, blk_hash, tx_hash, tx_id, pos, value = row
            except:
                print "cannot unpack row", row
                continue
            tx_hash = self.hashout_hex(tx_hash)
            txpoint = {
                "timestamp": int(nTime),
                "height": int(height),
                "is_input": int(is_in),
                "block_hash": self.hashout_hex(blk_hash),
                "tx_hash": tx_hash,
                "tx_id": int(tx_id),
                "index": int(pos),
                "value": int(value),
            }
            txpoints.append(txpoint)
            known_tx.append(tx_hash)

        # todo: sort them really...
        txpoints = sorted(txpoints, key=operator.itemgetter("timestamp"))

        # read memory pool
        rows = []
        rows += self.get_address_in_rows_memorypool(dbhash)
        rows += self.get_address_out_rows_memorypool(dbhash)
        address_has_mempool = False

        for row in rows:
            is_in, tx_hash, tx_id, pos, value = row
            tx_hash = self.hashout_hex(tx_hash)
            if tx_hash in known_tx:
                continue

            # discard transactions that are too old
            if self.last_tx_id - tx_id > 50000:
                print "discarding tx id", tx_id
                continue

            # this means that pending transactions were added to the db, even if they are not returned by getmemorypool
            address_has_mempool = True

            #print "mempool", tx_hash
            txpoint = {
                "timestamp": 0,
                "height": 0,
                "is_input": int(is_in),
                "block_hash": 'mempool',
                "tx_hash": tx_hash,
                "tx_id": int(tx_id),
                "index": int(pos),
                "value": int(value),
            }
            txpoints.append(txpoint)

        # add the input and output addresses of each transaction
        for txpoint in txpoints:
            tx_id = txpoint['tx_id']

            txinputs = []
            inrows = self.get_tx_inputs(tx_id)
            for row in inrows:
                _hash = self.binout(row[6])
                if not _hash:
                    #print "WARNING: missing tx_in for tx", tx_id, addr
                    continue
                address = hash_to_address(chr(self.addrtype), _hash)
                txinputs.append(address)
            txpoint['inputs'] = txinputs

            txoutputs = []
            outrows = self.get_tx_outputs(tx_id)
            for row in outrows:
                _hash = self.binout(row[6])
                if not _hash:
                    #print "WARNING: missing tx_out for tx", tx_id, addr
                    continue
                address = hash_to_address(chr(self.addrtype), _hash)
                txoutputs.append(address)
            txpoint['outputs'] = txoutputs

            # for all unspent outputs, we want their scriptPubKey (it could also be deduced from the address)
            if not txpoint['is_input']:
                # detect if already redeemed...
                for row in outrows:
                    if row[6] == dbhash: break
                else:
                    row = None
                #row = self.get_tx_output(tx_id,dbhash)
                # pos, script, value, o_hash, o_id, o_pos, binaddr = row
                # if not redeemed, we add the script
                if row and not row[4]:
                    txpoint['raw_output_script'] = row[1]

        # do not cache mempool results because statuses are ambiguous
        if not address_has_mempool:
            with self.cache_lock:
                self.tx_cache[addr] = txpoints
        return txpoints
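
    # get_history2 is the compact form used by newer clients: only
    # (tx_hash, height) pairs, with duplicates removed.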
    def get_history2(self, addr, cache_only=False):
        h = self.get_history(addr, cache_only)
        if cache_only and h == -1: return -1

        out = map(lambda x: {'tx_hash': x['tx_hash'], 'height': x['height']}, h)
        out2 = []
        for item in out:
            if item not in out2: out2.append(item)
        return out2
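
    # The "status" of an address summarizes its history so that clients can
    # tell whether anything changed: get_status returns the hash of the last
    # block that touched the address (or 'mempool:<n>'), get_status2 hashes
    # the whole (tx_hash, height) history.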
    def get_status(self, addr, cache_only=False):
        # get address status, i.e. the last block for that address.
        tx_points = self.get_history(addr, cache_only)
        if cache_only and tx_points == -1: return -1

        if not tx_points: return None
        lastpoint = tx_points[-1]
        status = lastpoint['block_hash']
        # this is a temporary hack; move it up once old clients have disappeared
        if status == 'mempool':  # and session['version'] != "old":
            status = status + ':%d' % len(tx_points)
        return status

    def get_status2(self, addr, cache_only=False):
        tx_points = self.get_history2(addr, cache_only)
        if cache_only and tx_points == -1: return -1

        if not tx_points: return None
        status = ''
        for tx in tx_points:
            status += tx.get('tx_hash') + ':%d:' % tx.get('height')
        return hashlib.sha256(status).digest().encode('hex')
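
    # Header serving: single headers come straight from Abe's chain_summary
    # view; get_chunk concatenates up to 2016 serialized headers (one retarget
    # period) and caches the result per chunk index.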
    def get_block_header(self, block_height):
        out = self.safe_sql("""
            SELECT block_hash, block_version, block_hashMerkleRoot, block_nTime,
                   block_nBits, block_nNonce, block_height, prev_block_hash, block_id
              FROM chain_summary
             WHERE block_height = %d AND in_longest = 1""" % block_height)

        if not out: raise BaseException("block not found")
        row = out[0]
        (block_hash, block_version, hashMerkleRoot, nTime, nBits, nNonce, height, prev_block_hash, block_id) \
            = (self.hashout_hex(row[0]), int(row[1]), self.hashout_hex(row[2]), int(row[3]), int(row[4]),
               int(row[5]), int(row[6]), self.hashout_hex(row[7]), int(row[8]))

        out = {"block_height": block_height, "version": block_version, "prev_block_hash": prev_block_hash,
               "merkle_root": hashMerkleRoot, "timestamp": nTime, "bits": nBits, "nonce": nNonce}
        return out

    def get_chunk(self, index):
        with self.cache_lock:
            msg = self.chunk_cache.get(index)
            if msg: return msg

        sql = """
            SELECT block_hash, block_version, block_hashMerkleRoot, block_nTime,
                   block_nBits, block_nNonce, block_height, prev_block_hash, block_height
              FROM chain_summary
             WHERE block_height >= %d AND block_height < %d AND in_longest = 1
             ORDER BY block_height""" % (index * 2016, (index + 1) * 2016)
        out = self.safe_sql(sql)

        msg = ''
        for row in out:
            (block_hash, block_version, hashMerkleRoot, nTime, nBits, nNonce, height, prev_block_hash, block_height) \
                = (self.hashout_hex(row[0]), int(row[1]), self.hashout_hex(row[2]), int(row[3]), int(row[4]),
                   int(row[5]), int(row[6]), self.hashout_hex(row[7]), int(row[8]))
            h = {"block_height": block_height, "version": block_version, "prev_block_hash": prev_block_hash,
                 "merkle_root": hashMerkleRoot, "timestamp": nTime, "bits": nBits, "nonce": nNonce}

            # the genesis block has no previous block
            if h.get('block_height') == 0: h['prev_block_hash'] = "0" * 64
            msg += header_to_string(h)
            #print "hash", encode(Hash(msg.decode('hex')))
            #if h.get('block_height')==1: break

        with self.cache_lock:
            self.chunk_cache[index] = msg
        print "get_chunk", index, len(msg)
        return msg
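
    # Raw transactions are not stored in the database; fetch them from
    # bitcoind over JSON-RPC on demand.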
    def get_raw_tx(self, tx_hash, height):
        postdata = dumps({"method": 'getrawtransaction', 'params': [tx_hash, 0, height], 'id': 'jsonrpc'})
        respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] is not None:
            raise BaseException(r['error'])

        hextx = r.get('result')
        return hextx
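
    # Build the merkle branch that proves tx_hash is included in its block.
    # The branch is recomputed from all tx hashes of the block on every request.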
    def get_tx_merkle(self, tx_hash):

        out = self.safe_sql("""
             SELECT block_tx.block_id FROM tx
             JOIN block_tx on tx.tx_id = block_tx.tx_id
             JOIN chain_summary on chain_summary.block_id = block_tx.block_id
             WHERE tx_hash='%s' AND in_longest = 1""" % tx_hash)
        if not out: raise BaseException("not in a block")
        block_id = int(out[0][0])

        # get the block height
        out = self.safe_sql("SELECT block_height FROM chain_summary WHERE block_id = %d AND in_longest = 1" % block_id)
        if not out: raise BaseException("block not found")
        block_height = int(out[0][0])

        merkle = []
        tx_pos = None

        # list all tx in block
        for row in self.safe_sql("""
            SELECT DISTINCT tx_id, tx_pos, tx_hash
              FROM txin_detail
             WHERE block_id = ?
             ORDER BY tx_pos""", (block_id,)):
            _id, _pos, _hash = row
            _hash = self.hashout_hex(_hash)
            if _hash == tx_hash: tx_pos = int(_pos)
            merkle.append(_hash)

        # find subset.
        # TODO: do not compute this on client request, better store the hash tree of each block in a database...
        merkle = map(decode, merkle)
        target_hash = decode(tx_hash)

        s = []
        while len(merkle) != 1:
            if len(merkle) % 2: merkle.append(merkle[-1])
            n = []
            while merkle:
                new_hash = Hash(merkle[0] + merkle[1])
                if merkle[0] == target_hash:
                    s.append(encode(merkle[1]))
                    target_hash = new_hash
                elif merkle[1] == target_hash:
                    s.append(encode(merkle[0]))
                    target_hash = new_hash
                n.append(new_hash)
                merkle = merkle[2:]
            merkle = n

        # send result
        return {"block_height": block_height, "merkle": s, "pos": tx_pos}
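
    # Poll bitcoind's memory pool and import any transaction we have not seen
    # yet, so that unconfirmed transactions show up in address histories.
    # Returns the number of newly imported transactions.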
    def memorypool_update(store):

        ds = BCDataStream.BCDataStream()
        postdata = dumps({"method": 'getrawmempool', 'params': [], 'id': 'jsonrpc'})
        respdata = urllib.urlopen(store.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] is not None:
            print r['error']
            return 0

        mempool_hashes = r.get('result')
        num_new_tx = 0

        for tx_hash in mempool_hashes:

            if tx_hash in store.known_mempool_hashes: continue
            store.known_mempool_hashes.append(tx_hash)
            num_new_tx += 1

            postdata = dumps({"method": 'getrawtransaction', 'params': [tx_hash], 'id': 'jsonrpc'})
            respdata = urllib.urlopen(store.bitcoind_url, postdata).read()
            r = loads(respdata)
            if r['error'] is not None:
                continue
            hextx = r.get('result')
            ds.write(hextx.decode('hex'))
            tx = deserialize.parse_Transaction(ds)
            tx['hash'] = util.double_sha256(tx['tx'])

            if not store.tx_find_id_and_value(tx):
                tx_id = store.import_tx(tx, False)
                store.update_tx_cache(tx_id)

        store.known_mempool_hashes = mempool_hashes
        return num_new_tx
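
    # Forward a signed raw transaction to bitcoind; on rejection the error
    # message (plus the offending transaction) is returned instead of the txid.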
    def send_tx(self, tx):
        postdata = dumps({"method": 'sendrawtransaction', 'params': [tx], 'id': 'jsonrpc'})
        respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] is not None:
            msg = r['error'].get('message')
            out = "error: transaction rejected by memorypool: " + msg + "\n" + tx
        else:
            out = r['result']
        return out
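
    # One polling cycle: catch up with the block files, refresh the memory
    # pool, drop the chunk that contains the new tip from the cache, and
    # return the new best header together with timing information.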
    def main_iteration(self):
        with self.lock:
            t1 = time.time()
            self.catch_up()
            t2 = time.time()
            time_catch_up = t2 - t1
            n = self.memorypool_update()
            time_mempool = time.time() - t2
            height = self.get_block_number(self.chain_id)

        with self.cache_lock:
            try:
                self.chunk_cache.pop(height / 2016)
            except KeyError:
                pass

        block_header = self.get_block_header(height)
        return block_header, time_catch_up, time_mempool, n

    def catch_up(store):
        # if there is an exception, do rollback and then re-raise the exception
        for dircfg in store.datadirs:
            try:
                store.catch_up_dir(dircfg)
            except Exception, e:
                store.log.exception("Failed to catch up %s", dircfg)
                store.rollback()
                raise e


from processor import Processor
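
# BlockchainProcessor glues AbeStore to the Electrum server framework: it
# dispatches "blockchain.*" requests to the store and pushes notifications
# (new blocks, address status changes) back to subscribed clients.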
class BlockchainProcessor(Processor):

    def __init__(self, config, shared):
        Processor.__init__(self)
        self.store = AbeStore(config)
        self.watched_addresses = []
        self.shared = shared

        # catch up with the blockchain before accepting requests
        self.block_header, time_catch_up, time_mempool, n = self.store.main_iteration()
        self.block_number = self.block_header.get('block_height')
        print "blockchain: %d blocks" % self.block_number

        threading.Timer(10, self.run_store_iteration).start()

    def add_request(self, request):
        # see if we can answer it from the cache; if not, add it to the queue
        if self.process(request, cache_only=True) == -1:
            self.queue.put(request)
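
    # process() handles a single JSON request.  With cache_only=True it
    # returns -1 when the answer is not cached, so add_request() can defer
    # the request to the worker queue instead of blocking on SQL.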
    def process(self, request, cache_only=False):
        #print "abe process", request

        message_id = request['id']
        method = request['method']
        params = request.get('params', [])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.block_number

        elif method == 'blockchain.headers.subscribe':
            result = self.block_header

        elif method == 'blockchain.address.subscribe':
            try:
                address = params[0]
                result = self.store.get_status(address, cache_only)
                self.watch_address(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print "error:", error

        elif method == 'blockchain.address.subscribe2':
            try:
                address = params[0]
                result = self.store.get_status2(address, cache_only)
                self.watch_address(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print "error:", error

        elif method == 'blockchain.address.get_history':
            try:
                address = params[0]
                result = self.store.get_history(address, cache_only)
            except BaseException, e:
                error = str(e) + ': ' + address
                print "error:", error

        elif method == 'blockchain.address.get_history2':
            try:
                address = params[0]
                result = self.store.get_history2(address, cache_only)
            except BaseException, e:
                error = str(e) + ': ' + address
                print "error:", error

        elif method == 'blockchain.block.get_header':
            if cache_only:
                result = -1
            else:
                try:
                    height = params[0]
                    result = self.store.get_block_header(height)
                except BaseException, e:
                    error = str(e) + ': %d' % height
                    print "error:", error

        elif method == 'blockchain.block.get_chunk':
            if cache_only:
                result = -1
            else:
                try:
                    index = params[0]
                    result = self.store.get_chunk(index)
                except BaseException, e:
                    error = str(e) + ': %d' % index
                    print "error:", error

        elif method == 'blockchain.transaction.broadcast':
            txo = self.store.send_tx(params[0])
            print "sent tx:", txo
            result = txo

        elif method == 'blockchain.transaction.get_merkle':
            if cache_only:
                result = -1
            else:
                try:
                    tx_hash = params[0]
                    result = self.store.get_tx_merkle(tx_hash)
                except BaseException, e:
                    error = str(e) + ': ' + tx_hash
                    print "error:", error

        elif method == 'blockchain.transaction.get':
            try:
                tx_hash = params[0]
                height = params[1]
                result = self.store.get_raw_tx(tx_hash, height)
            except BaseException, e:
                error = str(e) + ': ' + tx_hash
                print "error:", error

        else:
            error = "unknown method: %s" % method

        if cache_only and result == -1: return -1

        if error:
            response = {'id': message_id, 'error': error}
            self.push_response(response)
        else:
            response = {'id': message_id, 'result': result}
            self.push_response(response)

    def watch_address(self, addr):
        if addr not in self.watched_addresses:
            self.watched_addresses.append(addr)
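
    # Timer-driven main loop: run one store iteration every 10 seconds and
    # push numblocks / headers / address.subscribe notifications whenever
    # something changed.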
    def run_store_iteration(self):
        try:
            block_header, time_catch_up, time_mempool, n = self.store.main_iteration()
        except:
            traceback.print_exc(file=sys.stdout)
            # the store is broken; stop the server
            self.shared.stop()

        if self.shared.stopped():
            print "exit timer"
            return

        #print "block number: %d (%.3fs) mempool:%d (%.3fs)"%(self.block_number, time_catch_up, n, time_mempool)

        if self.block_number != block_header.get('block_height'):
            self.block_number = block_header.get('block_height')
            print "block number: %d (%.3fs)" % (self.block_number, time_catch_up)
            self.push_response({'id': None, 'method': 'blockchain.numblocks.subscribe', 'params': [self.block_number]})

        if self.block_header != block_header:
            self.block_header = block_header
            self.push_response({'id': None, 'method': 'blockchain.headers.subscribe', 'params': [self.block_header]})

        # notify subscribers of addresses whose history changed
        while True:
            try:
                addr = self.store.address_queue.get(False)
            except:
                break
            if addr in self.watched_addresses:
                status = self.store.get_status(addr)
                status2 = self.store.get_status2(addr)
                self.push_response({'id': None, 'method': 'blockchain.address.subscribe', 'params': [addr, status]})
                self.push_response({'id': None, 'method': 'blockchain.address.subscribe2', 'params': [addr, status2]})

        threading.Timer(10, self.run_store_iteration).start()