import hashlib
import os
import random
import sys
import threading
import time
import traceback
import urllib

import leveldb

from json import dumps, loads
from Queue import Queue

from backends.bitcoind import deserialize
from processor import Processor, print_log
# Hash, hash_encode, hash_decode, rev_hex, int_to_hex, header_to_string and
# header_from_string are used below; they are assumed to come from the
# project's utils module.
from utils import *

class BlockchainProcessor(Processor):

    def __init__(self, config, shared):
        Processor.__init__(self)

        self.shared = shared
        self.config = config
        self.up_to_date = False
        self.watched_addresses = []
        self.history_cache = {}

        self.cache_lock = threading.Lock()
        self.headers_data = ''

        self.mempool_addresses = {}
        self.mempool_hist = {}
        self.mempool_hashes = []
        self.mempool_lock = threading.Lock()

        self.address_queue = Queue()
        self.dbpath = config.get('leveldb', 'path')
        self.pruning_limit = config.getint('leveldb', 'pruning_limit')
        self.db_version = 1  # increase this when database needs to be updated
        self.is_test = False  # NOTE: assumed default; catch_up() uses this to exercise random reorgs in tests

        self.dblock = threading.Lock()
        try:
            self.db = leveldb.LevelDB(self.dbpath, paranoid_checks=True)
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (
            config.get('bitcoind', 'user'),
            config.get('bitcoind', 'password'),
            config.get('bitcoind', 'host'),
            config.get('bitcoind', 'port'))

        # check that bitcoind is reachable before doing anything else
        try:
            self.bitcoind('getinfo')
        except:
            print_log('cannot contact bitcoind...')
            self.shared.stop()
            return

        self.sent_height = None
        self.sent_header = None

        # load the database state; if it does not exist yet, start from the genesis block
        try:
            hist = self.deserialize(self.db.Get('height'))
            self.last_hash, self.height, db_version = hist[0]
            print_log("Database version", self.db_version)
            print_log("Blockchain height", self.height)
        except:
            traceback.print_exc(file=sys.stdout)
            print_log('initializing database')
            self.height = 0
            self.last_hash = '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
            db_version = self.db_version

        # check version
        if self.db_version != db_version:
            print_log("Your database '%s' is deprecated. Please create a new database" % self.dbpath)
            self.shared.stop()
            return

        # catch up missing headers
        self.init_headers(self.height)

        threading.Timer(0, lambda: self.catch_up(sync=False)).start()
        while not shared.stopped() and not self.up_to_date:
            try:
                time.sleep(1)
            except KeyboardInterrupt:
                print "keyboard interrupt: stopping threads"
                shared.stop()
                sys.exit(0)

        print_log("Blockchain is up to date.")
        self.memorypool_update()
        print_log("Memory pool initialized.")

        threading.Timer(10, self.main_iteration).start()

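    # Helper for a single JSON-RPC call to bitcoind over HTTP: returns the
    # 'result' field on success and raises when bitcoind reports an RPC error.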
    def bitcoind(self, method, params=[]):
        postdata = dumps({"method": method, 'params': params, 'id': 'jsonrpc'})
        try:
            respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        r = loads(respdata)
        if r['error'] is not None:
            raise BaseException(r['error'])
        return r.get('result')

    def serialize(self, h):
        s = ''
        for txid, txpos, height in h:
            s += self.serialize_item(txid, txpos, height)
        return s

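    # Each history entry is serialized as a fixed 40-byte item:
    #   bytes  0-31  txid
    #   bytes 32-35  output (or input) position
    #   bytes 36-38  block height
    #   byte     39  spent flag: chr(0) = unspent, chr(1) = spent
    # Position and height appear to be stored little-endian (int_to_hex on
    # write, rev_hex on read). In the database a record occupies 80 bytes: the
    # item above followed by a 40-byte slot that holds the redeeming item
    # (flagged chr(2)) once the output has been spent.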
    def serialize_item(self, txid, txpos, height, spent=chr(0)):
        s = (txid + int_to_hex(txpos, 4) + int_to_hex(height, 3)).decode('hex') + spent
        return s

    def deserialize_item(self, s):
        txid = s[0:32].encode('hex')
        txpos = int(rev_hex(s[32:36].encode('hex')), 16)
        height = int(rev_hex(s[36:39].encode('hex')), 16)
        spent = s[39:40]
        return (txid, txpos, height, spent)

    def deserialize(self, s):
        h = []
        while s:
            txid, txpos, height, spent = self.deserialize_item(s[0:40])
            h.append((txid, txpos, height))
            if spent == chr(1):
                txid, txpos, height, spent = self.deserialize_item(s[40:80])
                h.append((txid, txpos, height))
            s = s[80:]
        return h

    def block2header(self, b):
        return {
            "block_height": b.get('height'),
            "version": b.get('version'),
            "prev_block_hash": b.get('previousblockhash'),
            "merkle_root": b.get('merkleroot'),
            "timestamp": b.get('time'),
            "bits": int(b.get('bits'), 16),
            "nonce": b.get('nonce'),
        }

    def get_header(self, height):
        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        return self.block2header(b)

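    # Headers are kept in a flat file next to the database: one 80-byte
    # serialized header per block, at offset height * 80. init_headers()
    # rebuilds or extends that file up to the height stored in the database.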
    def init_headers(self, db_height):
        self.chunk_cache = {}
        self.headers_filename = os.path.join(self.dbpath, 'blockchain_headers')

        if os.path.exists(self.headers_filename):
            height = os.path.getsize(self.headers_filename)/80 - 1  # the current height
            if height > 0:
                prev_hash = self.hash_header(self.read_header(height))
            else:
                prev_hash = None
        else:
            open(self.headers_filename, 'wb').close()
            height = -1
            prev_hash = None

        if height < db_height:
            print_log("catching up missing headers:", height, db_height)

        try:
            while height < db_height:
                height += 1
                header = self.get_header(height)
                if height > 1:
                    assert prev_hash == header.get('prev_block_hash')
                self.write_header(header, sync=False)
                prev_hash = self.hash_header(header)
                if (height % 1000) == 0:
                    print_log("headers file:", height)
        except KeyboardInterrupt:
            self.flush_headers()
            sys.exit()

        self.flush_headers()

    def hash_header(self, header):
        return rev_hex(Hash(header_to_string(header).decode('hex')).encode('hex'))

    def read_header(self, block_height):
        if os.path.exists(self.headers_filename):
            with open(self.headers_filename, 'rb') as f:
                f.seek(block_height * 80)
                h = f.read(80)
            if len(h) == 80:
                h = header_from_string(h)
                return h

    def read_chunk(self, index):
        with open(self.headers_filename, 'rb') as f:
            f.seek(index*2016*80)
            chunk = f.read(2016*80)
            return chunk.encode('hex')

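    # Headers are served to clients in chunks of 2016 (one difficulty
    # retargeting period); a chunk is 2016 * 80 bytes read straight from the
    # headers file and returned hex-encoded.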
    def write_header(self, header, sync=True):
        if not self.headers_data:
            self.headers_offset = header.get('block_height')

        self.headers_data += header_to_string(header).decode('hex')
        if sync or len(self.headers_data) > 40*100:
            self.flush_headers()

        with self.cache_lock:
            chunk_index = header.get('block_height')/2016
            if self.chunk_cache.get(chunk_index):
                self.chunk_cache.pop(chunk_index)

    def pop_header(self):
        # we need to do this only if we have not flushed
        if self.headers_data:
            self.headers_data = self.headers_data[:-80]  # one serialized header is 80 bytes

    def flush_headers(self):
        if not self.headers_data:
            return
        with open(self.headers_filename, 'rb+') as f:
            f.seek(self.headers_offset*80)
            f.write(self.headers_data)
        self.headers_data = ''

    def get_chunk(self, i):
        # chunks are stored on disk; the current chunk is kept in memory
        with self.cache_lock:
            chunk = self.chunk_cache.get(i)
            if not chunk:
                chunk = self.read_chunk(i)
                self.chunk_cache[i] = chunk
        return chunk

    def get_mempool_transaction(self, txid):
        try:
            raw_tx = self.bitcoind('getrawtransaction', [txid, 0])
        except:
            return None

        vds = deserialize.BCDataStream()
        vds.write(raw_tx.decode('hex'))
        try:
            return deserialize.parse_Transaction(vds, is_coinbase=False)
        except:
            print_log("ERROR: cannot parse", txid)
            return None

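    # Address histories are cached in memory. get_history() returns the cached
    # value when available; with cache_only=True it returns -1 instead of
    # hitting the database, so that add_request() can decide whether a request
    # has to be queued.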
    def get_history(self, addr, cache_only=False):
        with self.cache_lock:
            hist = self.history_cache.get(addr)
        if hist is not None:
            return hist
        if cache_only:
            return -1

        with self.dblock:
            try:
                hist = self.deserialize(self.db.Get(addr))
                is_known = True
            except:
                hist = []
                is_known = False

        # sort history, because redeeming transactions are next to the corresponding txout
        hist.sort(key=lambda tup: tup[2])

        # add memory pool transactions (reported with height 0)
        with self.mempool_lock:
            for txid in self.mempool_hist.get(addr, []):
                hist.append((txid, 0, 0))

        # remove duplicates
        hist = set(map(lambda x: (x[0], x[2]), hist))
        hist = map(lambda x: {'tx_hash': x[0], 'height': x[1]}, hist)

        # add something to distinguish between unused and empty addresses
        if hist == [] and is_known:
            hist = ['*']

        with self.cache_lock:
            self.history_cache[addr] = hist
        return hist

    def get_status(self, addr, cache_only=False):
        tx_points = self.get_history(addr, cache_only)
        if cache_only and tx_points == -1:
            return -1
        if not tx_points:
            return None
        if tx_points == ['*']:
            return '*'
        status = ''
        for tx in tx_points:
            status += tx.get('tx_hash') + ':%d:' % tx.get('height')
        return hashlib.sha256(status).digest().encode('hex')

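    # Merkle branch for a confirmed transaction: starting from the txids of
    # the block, hashes are combined pairwise level by level (duplicating the
    # last one when a level has odd length); at each level the sibling of the
    # target hash is collected, which is what SPV clients need to verify
    # inclusion against the merkle root in the block header.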
    def get_merkle(self, tx_hash, height):
        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        tx_list = b.get('tx')
        tx_pos = tx_list.index(tx_hash)

        merkle = map(hash_decode, tx_list)
        target_hash = hash_decode(tx_hash)
        s = []
        while len(merkle) != 1:
            if len(merkle) % 2:
                merkle.append(merkle[-1])
            n = []
            while merkle:
                new_hash = Hash(merkle[0] + merkle[1])
                if merkle[0] == target_hash:
                    s.append(hash_encode(merkle[1]))
                    target_hash = new_hash
                elif merkle[1] == target_hash:
                    s.append(hash_encode(merkle[0]))
                    target_hash = new_hash
                n.append(new_hash)
                merkle = merkle[2:]
            merkle = n

        return {"block_height": height, "merkle": s, "pos": tx_pos}

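    # While a block is being imported, histories are edited in memory:
    # self.batch_list maps address -> serialized history and self.batch_txio
    # maps an outpoint (txid + output index) -> address. Both are written to
    # LevelDB in a single batch at the end of import_block().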
    def add_to_history(self, addr, tx_hash, tx_pos, tx_height):
        # keep the history sorted by height
        s = self.serialize_item(tx_hash, tx_pos, tx_height) + 40*chr(0)

        serialized_hist = self.batch_list[addr]

        l = len(serialized_hist)/80
        for i in range(l-1, -1, -1):
            item = serialized_hist[80*i:80*(i+1)]
            item_height = int(rev_hex(item[36:39].encode('hex')), 16)
            if item_height <= tx_height:
                serialized_hist = serialized_hist[0:80*(i+1)] + s + serialized_hist[80*(i+1):]
                break
        else:
            serialized_hist = s + serialized_hist

        self.batch_list[addr] = serialized_hist

        # backlink: remember which address this output belongs to
        txo = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')
        self.batch_txio[txo] = addr

    def revert_add_to_history(self, addr, tx_hash, tx_pos, tx_height):
        serialized_hist = self.batch_list[addr]
        s = self.serialize_item(tx_hash, tx_pos, tx_height) + 40*chr(0)
        if serialized_hist.find(s) == -1:
            raise BaseException("revert_add_to_history: item not found", addr, tx_hash)
        serialized_hist = serialized_hist.replace(s, '')
        self.batch_list[addr] = serialized_hist

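    # Pruning: once an address history grows past pruning_limit entries, fully
    # spent records (spent flag chr(1), with their chr(2) redeeming item in the
    # second half of the record) are dropped from the stored history. The
    # removed records are appended to the per-block undo information so that a
    # reorg can restore them via revert_prune_history().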
    def prune_history(self, addr, undo):
        # remove items that have the spent bit set to one
        if undo.get(addr) is None:
            undo[addr] = []

        serialized_hist = self.batch_list[addr]
        l = len(serialized_hist)/80
        for i in range(l):
            if len(serialized_hist)/80 < self.pruning_limit:
                break
            item = serialized_hist[80*i:80*(i+1)]
            if item[39:40] == chr(1):
                assert item[79:80] == chr(2)
                serialized_hist = serialized_hist[0:80*i] + serialized_hist[80*(i+1):]
                undo[addr].append(item)  # items are ordered
        self.batch_list[addr] = serialized_hist

    def revert_prune_history(self, addr, undo):
        # restore removed items
        serialized_hist = self.batch_list[addr]

        if undo.get(addr) is not None:
            itemlist = undo.pop(addr)
        else:
            return

        if not itemlist:
            return

        l = len(serialized_hist)/80
        tx_item = ''
        for i in range(l-1, -1, -1):
            if tx_item == '':
                if not itemlist:
                    break
                tx_item = itemlist.pop(-1)  # get the last element
                tx_height = int(rev_hex(tx_item[36:39].encode('hex')), 16)

            item = serialized_hist[80*i:80*(i+1)]
            item_height = int(rev_hex(item[36:39].encode('hex')), 16)

            if item_height < tx_height:
                serialized_hist = serialized_hist[0:80*(i+1)] + tx_item + serialized_hist[80*(i+1):]
                tx_item = ''
        else:
            # remaining items go to the front of the history
            serialized_hist = ''.join(itemlist) + tx_item + serialized_hist

        self.batch_list[addr] = serialized_hist

    def set_spent_bit(self, addr, txi, is_spent, txid=None, index=None, height=None):
        serialized_hist = self.batch_list[addr]
        l = len(serialized_hist)/80
        for i in range(l):
            item = serialized_hist[80*i:80*(i+1)]
            if item[0:36] == txi:
                if is_spent:
                    new_item = item[0:39] + chr(1) + self.serialize_item(txid, index, height, chr(2))
                else:
                    new_item = item[0:39] + chr(0) + chr(0)*40
                serialized_hist = serialized_hist[0:80*i] + new_item + serialized_hist[80*(i+1):]
                break
        else:
            hist = self.deserialize(serialized_hist)
            raise BaseException("prevout not found", addr, hist, txi.encode('hex'))

        self.batch_list[addr] = serialized_hist

    def unset_spent_bit(self, addr, txi):
        self.set_spent_bit(addr, txi, False)
        self.batch_txio[txi] = addr

    def deserialize_block(self, block):
        txlist = block.get('tx')
        tx_hashes = []  # ordered txids
        txdict = {}     # deserialized tx
        is_coinbase = True
        for raw_tx in txlist:
            tx_hash = hash_encode(Hash(raw_tx.decode('hex')))
            vds = deserialize.BCDataStream()
            vds.write(raw_tx.decode('hex'))
            try:
                tx = deserialize.parse_Transaction(vds, is_coinbase)
            except:
                print_log("ERROR: cannot parse", tx_hash)
                continue
            tx_hashes.append(tx_hash)
            txdict[tx_hash] = tx
            is_coinbase = False
        return tx_hashes, txdict

    def get_undo_info(self, height):
        s = self.db.Get("undo%d" % (height % 100))
        return eval(s)

    def write_undo_info(self, batch, height, undo_info):
        if self.is_test or height > self.bitcoind_height - 100:
            batch.Put("undo%d" % (height % 100), repr(undo_info))

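    # Undo information is stored under the keys "undo0" .. "undo99" (height
    # modulo 100), i.e. only for roughly the last 100 blocks, which is what a
    # reorg is expected to need. It records, per transaction, the previous
    # input addresses and any pruned history items, so that import_block()
    # can be replayed in reverse with revert=True.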
    def import_block(self, block, block_hash, block_height, sync, revert=False):

        self.batch_list = {}  # address -> history
        self.batch_txio = {}  # transaction i/o -> address

        block_inputs = []
        block_outputs = []
        addr_to_read = []

        # deserialize transactions
        t0 = time.time()
        tx_hashes, txdict = self.deserialize_block(block)
        t00 = time.time()

        # undo info is needed only when reverting
        if revert:
            undo_info = self.get_undo_info(block_height)
        else:
            undo_info = {}

        if not revert:
            # read addresses of tx inputs
            for tx in txdict.values():
                for x in tx.get('inputs'):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    block_inputs.append(txi)

            for txi in block_inputs:
                try:
                    addr = self.db.Get(txi)
                except KeyError:
                    # the input could come from the same block
                    continue
                except:
                    traceback.print_exc(file=sys.stdout)
                    self.shared.stop()
                    raise
                self.batch_txio[txi] = addr
                addr_to_read.append(addr)

        else:
            for txid, tx in txdict.items():
                for x in tx.get('outputs'):
                    txo = (txid + int_to_hex(x.get('index'), 4)).decode('hex')
                    block_outputs.append(txo)
                    addr_to_read.append(x.get('address'))

                undo = undo_info.get(txid)
                for i, x in enumerate(tx.get('inputs')):
                    addr = undo['prev_addr'][i]
                    addr_to_read.append(addr)

        # read histories of addresses
        for txid, tx in txdict.items():
            for x in tx.get('outputs'):
                addr_to_read.append(x.get('address'))

        for addr in addr_to_read:
            try:
                self.batch_list[addr] = self.db.Get(addr)
            except KeyError:
                self.batch_list[addr] = ''
            except:
                traceback.print_exc(file=sys.stdout)
                self.shared.stop()
                raise

        # process transactions
        t1 = time.time()
        if revert:
            tx_hashes = tx_hashes[::-1]
        for txid in tx_hashes:  # must be ordered
            tx = txdict[txid]
            if not revert:
                undo = {'prev_addr': []}  # contains the list of pruned items for each address in the tx; also, 'prev_addr' is a list of prev addresses

                prev_addr = []
                for i, x in enumerate(tx.get('inputs')):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    addr = self.batch_txio[txi]

                    # add redeem item to the history.
                    # add it right next to the input txi? this will break history sorting, but it's ok if I neglect tx inputs during search
                    self.set_spent_bit(addr, txi, True, txid, i, block_height)

                    # when I prune, prune a pair
                    self.prune_history(addr, undo)
                    prev_addr.append(addr)

                undo['prev_addr'] = prev_addr

                # here I add only the outputs to history; maybe I want to add inputs too (that's in the other loop)
                for x in tx.get('outputs'):
                    addr = x.get('address')
                    self.add_to_history(addr, txid, x.get('index'), block_height)
                    self.prune_history(addr, undo)  # prune here because we increased the length of the history

                undo_info[txid] = undo
            else:
                undo = undo_info.pop(txid)

                for x in tx.get('outputs'):
                    addr = x.get('address')
                    self.revert_prune_history(addr, undo)
                    self.revert_add_to_history(addr, txid, x.get('index'), block_height)

                prev_addr = undo.pop('prev_addr')
                for i, x in enumerate(tx.get('inputs')):
                    addr = prev_addr[i]
                    self.revert_prune_history(addr, undo)
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    self.unset_spent_bit(addr, txi)

        if revert:
            assert undo_info == {}

        # write the batch
        t2 = time.time()
        max_len = 0
        max_addr = ''

        batch = leveldb.WriteBatch()
        for addr, serialized_hist in self.batch_list.items():
            batch.Put(addr, serialized_hist)
            l = len(serialized_hist)/80
            if l > max_len:
                max_len = l
                max_addr = addr

        if not revert:
            # add new created outputs
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete spent inputs
            for txi in block_inputs:
                batch.Delete(txi)
            # add undo info
            self.write_undo_info(batch, block_height, undo_info)
        else:
            # restore spent inputs
            for txio, addr in self.batch_txio.items():
                # print "restoring spent input", repr(txio)
                batch.Put(txio, addr)
            # delete spent outputs
            for txo in block_outputs:
                batch.Delete(txo)

        # new database height
        batch.Put('height', self.serialize([(block_hash, block_height, self.db_version)]))

        # actual write
        self.db.Write(batch, sync=sync)

        t3 = time.time()
        if t3 - t0 > 10 and not sync:
            print_log("block", block_height,
                      "parse:%0.2f " % (t00 - t0),
                      "read:%0.2f " % (t1 - t00),
                      "proc:%.2f " % (t2 - t1),
                      "write:%.2f " % (t3 - t2),
                      "max:", max_len, max_addr)

        for addr in self.batch_list.keys():
            self.invalidate_cache(addr)

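    # Request entry points: add_request() first tries to serve a request from
    # the cache (process() with cache_only=True); if that returns -1, the
    # request is queued and process() answers it, and pushes the response,
    # later.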
    def add_request(self, request):
        # see if we can get it from the cache. if not, add the request to the queue
        if self.process(request, cache_only=True) == -1:
            self.queue.put(request)

    def process(self, request, cache_only=False):
        #print "abe process", request

        message_id = request['id']
        method = request['method']
        params = request.get('params', [])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.height

        elif method == 'blockchain.headers.subscribe':
            result = self.header

        elif method == 'blockchain.address.subscribe':
            try:
                address = params[0]
                result = self.get_status(address, cache_only)
                self.watch_address(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.address.unsubscribe':
            try:
                password = params[0]
                address = params[1]
                if password == self.config.get('server', 'password'):
                    self.watched_addresses.remove(address)
                    # print_log('unsubscribed', address)
                else:
                    print_log('incorrect password')
                    result = "authentication error"
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.address.get_history':
            try:
                address = params[0]
                result = self.get_history(address, cache_only)
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.block.get_header':
            if cache_only:
                result = -1
            else:
                try:
                    height = params[0]
                    result = self.get_header(height)
                except BaseException, e:
                    error = str(e) + ': %d' % height
                    print_log("error:", error)

        elif method == 'blockchain.block.get_chunk':
            if cache_only:
                result = -1
            else:
                try:
                    index = params[0]
                    result = self.get_chunk(index)
                except BaseException, e:
                    error = str(e) + ': %d' % index
                    print_log("error:", error)

        elif method == 'blockchain.transaction.broadcast':
            try:
                txo = self.bitcoind('sendrawtransaction', params)
                print_log("sent tx:", txo)
                result = txo
            except BaseException, e:
                result = str(e)  # do not send an error
                print_log("error:", result, params)

        elif method == 'blockchain.transaction.get_merkle':
            if cache_only:
                result = -1
            else:
                try:
                    tx_hash = params[0]
                    tx_height = params[1]
                    result = self.get_merkle(tx_hash, tx_height)
                except BaseException, e:
                    error = str(e) + ': ' + repr(params)
                    print_log("get_merkle error:", error)

        elif method == 'blockchain.transaction.get':
            try:
                tx_hash = params[0]
                result = self.bitcoind('getrawtransaction', [tx_hash, 0])
            except BaseException, e:
                error = str(e) + ': ' + repr(params)
                print_log("tx get error:", error)

        else:
            error = "unknown method:%s" % method

        if cache_only and result == -1:
            return -1

        if error:
            self.push_response({'id': message_id, 'error': error})
        else:
            self.push_response({'id': message_id, 'result': result})

    def watch_address(self, addr):
        if addr not in self.watched_addresses:
            self.watched_addresses.append(addr)

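    # getfullblock() fetches a block and then all of its raw transactions in a
    # single batched JSON-RPC request (an array of getrawtransaction calls),
    # avoiding one HTTP round trip per transaction.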
    def getfullblock(self, block_hash):
        block = self.bitcoind('getblock', [block_hash])

        rawtxreq = []
        for i, txid in enumerate(block['tx']):
            rawtxreq.append({
                "method": "getrawtransaction",
                "params": [txid],
                "id": i,
            })

        postdata = dumps(rawtxreq)
        try:
            respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        r = loads(respdata)
        rawtxdata = []
        for ir in r:
            if ir['error'] is not None:
                print_log("Error: make sure you run bitcoind with txindex=1; use -reindex if needed.")
                raise BaseException(ir['error'])
            rawtxdata.append(ir['result'])
        block['tx'] = rawtxdata
        return block

    def catch_up(self, sync=True):
        t1 = time.time()

        while not self.shared.stopped():
            # are we done yet?
            info = self.bitcoind('getinfo')
            self.bitcoind_height = info.get('blocks')
            bitcoind_block_hash = self.bitcoind('getblockhash', [self.bitcoind_height])
            if self.last_hash == bitcoind_block_hash:
                self.up_to_date = True
                break

            # not done yet: import the next block
            self.up_to_date = False
            next_block_hash = self.bitcoind('getblockhash', [self.height + 1])
            next_block = self.getfullblock(next_block_hash)

            # fixme: this is unsafe, if we revert when the undo info is not yet written
            revert = (random.randint(1, 100) == 1) if self.is_test else False

            if (next_block.get('previousblockhash') == self.last_hash) and not revert:

                self.import_block(next_block, next_block_hash, self.height+1, sync)
                self.height = self.height + 1
                self.write_header(self.block2header(next_block), sync)
                self.last_hash = next_block_hash

                if self.height % 100 == 0 and not sync:
                    t2 = time.time()
                    print_log("catch_up: block %d (%.3fs)" % (self.height, t2 - t1))
                    t1 = t2

            else:
                # revert current block
                block = self.getfullblock(self.last_hash)
                print_log("blockchain reorg", self.height, block.get('previousblockhash'), self.last_hash)
                self.import_block(block, self.last_hash, self.height, sync, revert=True)
                self.pop_header()
                self.flush_headers()
                self.height -= 1

                # read previous header from disk
                self.header = self.read_header(self.height)
                self.last_hash = self.hash_header(self.header)

        self.header = self.block2header(self.bitcoind('getblock', [self.last_hash]))

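    # Mempool tracking: self.mempool_hashes is the list of mempool txids seen
    # so far, self.mempool_addresses maps txid -> addresses touched by that
    # transaction, and self.mempool_hist (rebuilt on every pass) maps
    # address -> list of unconfirmed txids, which get_history() reports with
    # height 0.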
    def memorypool_update(self):
        mempool_hashes = self.bitcoind('getrawmempool')

        touched_addresses = []
        for tx_hash in mempool_hashes:
            if tx_hash in self.mempool_hashes:
                continue

            tx = self.get_mempool_transaction(tx_hash)
            if not tx:
                continue

            mpa = self.mempool_addresses.get(tx_hash, [])
            for x in tx.get('inputs'):
                # we assume that the input address can be parsed by deserialize(); this is true for Electrum transactions
                addr = x.get('address')
                if addr and addr not in mpa:
                    mpa.append(addr)
                    touched_addresses.append(addr)

            for x in tx.get('outputs'):
                addr = x.get('address')
                if addr and addr not in mpa:
                    mpa.append(addr)
                    touched_addresses.append(addr)

            self.mempool_addresses[tx_hash] = mpa
            self.mempool_hashes.append(tx_hash)

        # remove older entries from mempool_hashes
        self.mempool_hashes = mempool_hashes

        # remove deprecated entries from mempool_addresses
        for tx_hash, addresses in self.mempool_addresses.items():
            if tx_hash not in self.mempool_hashes:
                self.mempool_addresses.pop(tx_hash)
                for addr in addresses:
                    touched_addresses.append(addr)

        # rebuild mempool histories
        new_mempool_hist = {}
        for tx_hash, addresses in self.mempool_addresses.items():
            for addr in addresses:
                h = new_mempool_hist.get(addr, [])
                h.append(tx_hash)
                new_mempool_hist[addr] = h

        with self.mempool_lock:
            self.mempool_hist = new_mempool_hist

        # invalidate cache for touched addresses
        for addr in touched_addresses:
            self.invalidate_cache(addr)

    def invalidate_cache(self, address):
        with self.cache_lock:
            if address in self.history_cache:
                print_log("cache: invalidating", address)
                self.history_cache.pop(address)

        if address in self.watched_addresses:
            # TODO: update cache here. if new value equals cached value, do not send notification
            self.address_queue.put(address)

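    # main_iteration() reschedules itself with a threading.Timer every 10
    # seconds: it catches up with bitcoind, refreshes the mempool, and pushes
    # numblocks/headers/address notifications to subscribed clients.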
    def main_iteration(self):
        if self.shared.stopped():
            print_log("blockchain processor terminating")
            return

        with self.dblock:
            t1 = time.time()
            self.catch_up()
            t2 = time.time()

        self.memorypool_update()

        if self.sent_height != self.height:
            self.sent_height = self.height
            self.push_response({
                'method': 'blockchain.numblocks.subscribe',
                'params': [self.height],
            })

        if self.sent_header != self.header:
            print_log("blockchain: %d (%.3fs)" % (self.height, t2 - t1))
            self.sent_header = self.header
            self.push_response({
                'method': 'blockchain.headers.subscribe',
                'params': [self.header],
            })

        while True:
            try:
                addr = self.address_queue.get(False)
            except:
                break
            if addr in self.watched_addresses:
                status = self.get_status(addr)
                self.push_response({
                    'method': 'blockchain.address.subscribe',
                    'params': [addr, status],
                })

        if not self.shared.stopped():
            threading.Timer(10, self.main_iteration).start()
        else:
            print_log("blockchain processor terminating")