use set instead of list in memorypool_update
[electrum-server.git] / backends / bitcoind / blockchain_processor.py
1 import ast
2 import hashlib
3 from json import dumps, loads
4 import leveldb
5 import os
6 from Queue import Queue
7 import random
8 import sys
9 import time
10 import threading
11 import traceback
12 import urllib
13
14 from backends.bitcoind import deserialize
15 from processor import Processor, print_log
16 from utils import *
17
18
19 class BlockchainProcessor(Processor):
20
    def __init__(self, config, shared):
        """Start the blockchain processor: open the LevelDB store, connect to
        bitcoind, catch up with the chain, then schedule the main loop.

        config: ConfigParser-style object with 'leveldb' and 'bitcoind' sections.
        shared: shared state object used to signal shutdown across threads.
        """
        Processor.__init__(self)

        self.shared = shared
        self.config = config
        self.up_to_date = False

        # subscription bookkeeping; watch_lock guards all three structures
        self.watch_lock = threading.Lock()
        self.watch_blocks = []        # sessions subscribed to numblocks
        self.watch_headers = []       # sessions subscribed to headers
        self.watched_addresses = {}   # address -> list of subscribed sessions

        # result caches; cache_lock guards both
        self.history_cache = {}
        self.chunk_cache = {}
        self.cache_lock = threading.Lock()
        self.headers_data = ''        # header bytes buffered, not yet flushed to disk

        # memory-pool state; mempool_lock guards these
        self.mempool_addresses = {}   # txid -> addresses touched by the tx
        self.mempool_hist = {}        # address -> list of mempool txids
        self.mempool_hashes = set([])
        self.mempool_lock = threading.Lock()

        self.address_queue = Queue()
        self.dbpath = config.get('leveldb', 'path')
        self.pruning_limit = config.getint('leveldb', 'pruning_limit')
        self.db_version = 1 # increase this when database needs to be updated

        self.dblock = threading.Lock()
        try:
            self.db = leveldb.LevelDB(self.dbpath, paranoid_checks=True)
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (
            config.get('bitcoind', 'user'),
            config.get('bitcoind', 'password'),
            config.get('bitcoind', 'host'),
            config.get('bitcoind', 'port'))

        # block until bitcoind answers JSON-RPC
        while True:
            try:
                self.bitcoind('getinfo')
                break
            except:
                print_log('cannot contact bitcoind...')
                time.sleep(5)
                continue

        self.height = 0
        self.is_test = False
        self.sent_height = 0
        self.sent_header = None

        try:
            # the 'height' record is serialized like a history item and
            # unpacks as (last_hash, height, db_version)
            hist = self.deserialize(self.db.Get('height'))
            self.last_hash, self.height, db_version = hist[0]
            print_log("Database version", self.db_version)
            print_log("Blockchain height", self.height)
        except:
            traceback.print_exc(file=sys.stdout)
            print_log('initializing database')
            self.height = 0
            # mainnet genesis block hash
            self.last_hash = '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
            db_version = self.db_version

        # check version
        if self.db_version != db_version:
            print_log("Your database '%s' is deprecated. Please create a new database"%self.dbpath)
            self.shared.stop()
            return

        # catch_up headers
        self.init_headers(self.height)

        # catch up in a background thread while this one waits
        threading.Timer(0, lambda: self.catch_up(sync=False)).start()
        while not shared.stopped() and not self.up_to_date:
            try:
                time.sleep(1)
            except:
                print "keyboard interrupt: stopping threads"
                shared.stop()
                sys.exit(0)

        print_log("Blockchain is up to date.")
        self.memorypool_update()
        print_log("Memory pool initialized.")

        threading.Timer(10, self.main_iteration).start()
110
111     def bitcoind(self, method, params=[]):
112         postdata = dumps({"method": method, 'params': params, 'id': 'jsonrpc'})
113         try:
114             respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
115         except:
116             traceback.print_exc(file=sys.stdout)
117             self.shared.stop()
118
119         r = loads(respdata)
120         if r['error'] is not None:
121             raise BaseException(r['error'])
122         return r.get('result')
123
124     def serialize(self, h):
125         s = ''
126         for txid, txpos, height in h:
127             s += self.serialize_item(txid, txpos, height)
128         return s
129
130     def serialize_item(self, txid, txpos, height, spent=chr(0)):
131         s = (txid + int_to_hex(txpos, 4) + int_to_hex(height, 3)).decode('hex') + spent 
132         return s
133
134     def deserialize_item(self,s):
135         txid = s[0:32].encode('hex')
136         txpos = int(rev_hex(s[32:36].encode('hex')), 16)
137         height = int(rev_hex(s[36:39].encode('hex')), 16)
138         spent = s[39:40]
139         return (txid, txpos, height, spent)
140
141     def deserialize(self, s):
142         h = []
143         while s:
144             txid, txpos, height, spent = self.deserialize_item(s[0:40])
145             h.append((txid, txpos, height))
146             if spent == chr(1):
147                 txid, txpos, height, spent = self.deserialize_item(s[40:80])
148                 h.append((txid, txpos, height))
149             s = s[80:]
150         return h
151
152     def block2header(self, b):
153         return {
154             "block_height": b.get('height'),
155             "version": b.get('version'),
156             "prev_block_hash": b.get('previousblockhash'),
157             "merkle_root": b.get('merkleroot'),
158             "timestamp": b.get('time'),
159             "bits": int(b.get('bits'), 16),
160             "nonce": b.get('nonce'),
161         }
162
163     def get_header(self, height):
164         block_hash = self.bitcoind('getblockhash', [height])
165         b = self.bitcoind('getblock', [block_hash])
166         return self.block2header(b)
167
    def init_headers(self, db_height):
        """Ensure the on-disk headers file covers every block up to
        *db_height*, downloading missing headers from bitcoind.

        db_height: height the database claims to have processed.
        """
        self.chunk_cache = {}
        self.headers_filename = os.path.join(self.dbpath, 'blockchain_headers')

        if os.path.exists(self.headers_filename):
            height = os.path.getsize(self.headers_filename)/80 - 1   # the current height
            if height > 0:
                prev_hash = self.hash_header(self.read_header(height))
            else:
                prev_hash = None
        else:
            # create an empty headers file and start from scratch
            open(self.headers_filename, 'wb').close()
            prev_hash = None
            height = -1

        if height < db_height:
            print_log("catching up missing headers:", height, db_height)

        try:
            while height < db_height:
                height = height + 1
                header = self.get_header(height)
                if height > 1:
                    # each header must chain onto the previous one
                    assert prev_hash == header.get('prev_block_hash')
                self.write_header(header, sync=False)
                prev_hash = self.hash_header(header)
                if (height % 1000) == 0:
                    print_log("headers file:", height)
        except KeyboardInterrupt:
            # flush what we have so the file stays consistent, then exit
            self.flush_headers()
            sys.exit()

        self.flush_headers()
201
202     def hash_header(self, header):
203         return rev_hex(Hash(header_to_string(header).decode('hex')).encode('hex'))
204
205     def read_header(self, block_height):
206         if os.path.exists(self.headers_filename):
207             with open(self.headers_filename, 'rb') as f:
208                 f.seek(block_height * 80)
209                 h = f.read(80)
210             if len(h) == 80:
211                 h = header_from_string(h)
212                 return h
213
214     def read_chunk(self, index):
215         with open(self.headers_filename, 'rb') as f:
216             f.seek(index*2016*80)
217             chunk = f.read(2016*80)
218         return chunk.encode('hex')
219
    def write_header(self, header, sync=True):
        """Append *header* to the in-memory headers buffer, flushing to disk
        when *sync* is set or the buffer has grown large enough, and drop the
        chunk-cache entry the new header invalidates."""
        if not self.headers_data:
            # remember where this run of buffered headers starts on disk
            self.headers_offset = header.get('block_height')

        self.headers_data += header_to_string(header).decode('hex')
        # NOTE(review): each serialized header is 80 bytes, so this flushes
        # roughly every 50 headers; the 40* factor looks historical -- confirm.
        if sync or len(self.headers_data) > 40*100:
            self.flush_headers()

        with self.cache_lock:
            chunk_index = header.get('block_height')/2016
            if self.chunk_cache.get(chunk_index):
                self.chunk_cache.pop(chunk_index)
232
233     def pop_header(self):
234         # we need to do this only if we have not flushed
235         if self.headers_data:
236             self.headers_data = self.headers_data[:-40]
237
238     def flush_headers(self):
239         if not self.headers_data:
240             return
241         with open(self.headers_filename, 'rb+') as f:
242             f.seek(self.headers_offset*80)
243             f.write(self.headers_data)
244         self.headers_data = ''
245
246     def get_chunk(self, i):
247         # store them on disk; store the current chunk in memory
248         with self.cache_lock:
249             chunk = self.chunk_cache.get(i)
250             if not chunk:
251                 chunk = self.read_chunk(i)
252                 self.chunk_cache[i] = chunk
253
254         return chunk
255
256     def get_mempool_transaction(self, txid):
257         try:
258             raw_tx = self.bitcoind('getrawtransaction', [txid, 0])
259         except:
260             return None
261
262         vds = deserialize.BCDataStream()
263         vds.write(raw_tx.decode('hex'))
264         try:
265             return deserialize.parse_Transaction(vds, is_coinbase=False)
266         except:
267             print_log("ERROR: cannot parse", txid)
268             return None
269
    def get_history(self, addr, cache_only=False):
        """Return the confirmed + mempool history of *addr* as a list of
        {'tx_hash', 'height'} dicts (height 0 for mempool entries).

        Returns -1 when cache_only is set and the address is not cached, and
        ['*'] for an address known to the db whose history is currently
        empty, so clients can distinguish 'used' from 'never seen'.
        """
        with self.cache_lock:
            hist = self.history_cache.get(addr)
        if hist is not None:
            return hist
        if cache_only:
            return -1

        with self.dblock:
            try:
                hist = self.deserialize(self.db.Get(addr))
                is_known = True
            except:
                # address not present in the database
                hist = []
                is_known = False

        # sort history, because redeeming transactions are next to the corresponding txout
        hist.sort(key=lambda tup: tup[2])

        # add memory pool
        with self.mempool_lock:
            for txid in self.mempool_hist.get(addr, []):
                hist.append((txid, 0, 0))

        # uniqueness
        hist = set(map(lambda x: (x[0], x[2]), hist))

        # convert to dict
        hist = map(lambda x: {'tx_hash': x[0], 'height': x[1]}, hist)

        # add something to distinguish between unused and empty addresses
        if hist == [] and is_known:
            hist = ['*']

        with self.cache_lock:
            self.history_cache[addr] = hist
        return hist
307
308     def get_status(self, addr, cache_only=False):
309         tx_points = self.get_history(addr, cache_only)
310         if cache_only and tx_points == -1:
311             return -1
312
313         if not tx_points:
314             return None
315         if tx_points == ['*']:
316             return '*'
317         status = ''
318         for tx in tx_points:
319             status += tx.get('tx_hash') + ':%d:' % tx.get('height')
320         return hashlib.sha256(status).digest().encode('hex')
321
    def get_merkle(self, tx_hash, height):
        """Compute the merkle branch proving *tx_hash* is in the block at
        *height*.

        Returns {'block_height', 'merkle': sibling hashes from leaf to root,
        'pos': index of the tx within the block}.  Raises ValueError (via
        list.index) if the tx is not in the block.
        """
        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        tx_list = b.get('tx')
        tx_pos = tx_list.index(tx_hash)

        merkle = map(hash_decode, tx_list)
        target_hash = hash_decode(tx_hash)
        s = []
        # standard merkle reduction: hash pairs level by level, recording the
        # sibling of whichever pair contains the running target hash
        while len(merkle) != 1:
            if len(merkle) % 2:
                # odd number of nodes: bitcoin duplicates the last hash
                merkle.append(merkle[-1])
            n = []
            while merkle:
                new_hash = Hash(merkle[0] + merkle[1])
                if merkle[0] == target_hash:
                    s.append(hash_encode(merkle[1]))
                    target_hash = new_hash
                elif merkle[1] == target_hash:
                    s.append(hash_encode(merkle[0]))
                    target_hash = new_hash
                n.append(new_hash)
                merkle = merkle[2:]
            merkle = n

        return {"block_height": height, "merkle": s, "pos": tx_pos}
349
350
    def add_to_history(self, addr, tx_hash, tx_pos, tx_height):
        """Insert output (tx_hash, tx_pos, tx_height) into the batched
        history of *addr*, keeping the 80-byte records sorted by height, and
        record the txout -> address backlink in batch_txio.

        Only valid between the batch_list/batch_txio setup and the db write
        inside import_block.
        """
        # keep it sorted
        # record = 40-byte item followed by 40 zero bytes (unspent placeholder)
        s = self.serialize_item(tx_hash, tx_pos, tx_height) + 40*chr(0)
        assert len(s) == 80

        serialized_hist = self.batch_list[addr]

        # walk records from the end; insert after the last record whose
        # height is <= tx_height, or prepend if none qualifies
        l = len(serialized_hist)/80
        for i in range(l-1, -1, -1):
            item = serialized_hist[80*i:80*(i+1)]
            item_height = int(rev_hex(item[36:39].encode('hex')), 16)
            if item_height <= tx_height:
                serialized_hist = serialized_hist[0:80*(i+1)] + s + serialized_hist[80*(i+1):]
                break
        else:
            serialized_hist = s + serialized_hist

        self.batch_list[addr] = serialized_hist

        # backlink
        txo = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')
        self.batch_txio[txo] = addr
373
374
375
376     def revert_add_to_history(self, addr, tx_hash, tx_pos, tx_height):
377
378         serialized_hist = self.batch_list[addr]
379         s = self.serialize_item(tx_hash, tx_pos, tx_height) + 40*chr(0)
380         if serialized_hist.find(s) == -1: raise
381         serialized_hist = serialized_hist.replace(s, '')
382         self.batch_list[addr] = serialized_hist
383
384
385
    def prune_history(self, addr, undo):
        """Remove spent records from *addr*'s batched history while it is not
        shorter than pruning_limit, appending each removed record to
        undo[addr] so the pruning can be reverted.

        A spent record has its flag byte set to chr(1) and carries the
        spending entry (flag chr(2)) in its second 40 bytes.
        """
        # remove items that have bit set to one
        if undo.get(addr) is None: undo[addr] = []

        serialized_hist = self.batch_list[addr]
        l = len(serialized_hist)/80
        for i in range(l):
            # stop as soon as the history is under the limit
            if len(serialized_hist)/80 < self.pruning_limit: break
            item = serialized_hist[80*i:80*(i+1)] 
            # NOTE(review): serialized_hist shrinks inside the loop while i
            # keeps advancing over the original range, so the record that
            # slides into a just-deleted slot is skipped on this pass -- it
            # would be pruned by a later call; confirm this is intended.
            if item[39:40] == chr(1):
                assert item[79:80] == chr(2)
                serialized_hist = serialized_hist[0:80*i] + serialized_hist[80*(i+1):]
                undo[addr].append(item)  # items are ordered
        self.batch_list[addr] = serialized_hist
400
401
    def revert_prune_history(self, addr, undo):
        """Re-insert the records that prune_history removed for *addr*,
        restoring height order.  Consumes (pops) undo[addr]."""
        # restore removed items
        serialized_hist = self.batch_list[addr]

        if undo.get(addr) is not None: 
            itemlist = undo.pop(addr)
        else:
            return 

        if not itemlist: return

        l = len(serialized_hist)/80
        tx_item = ''
        # scan existing records from the end; take undone items from the tail
        # of itemlist (highest first) and insert each one right after the
        # last record of strictly lower height
        for i in range(l-1, -1, -1):
            if tx_item == '':
                if not itemlist: 
                    break
                else:
                    tx_item = itemlist.pop(-1) # get the last element
                    tx_height = int(rev_hex(tx_item[36:39].encode('hex')), 16)
            
            item = serialized_hist[80*i:80*(i+1)]
            item_height = int(rev_hex(item[36:39].encode('hex')), 16)

            if item_height < tx_height:
                serialized_hist = serialized_hist[0:80*(i+1)] + tx_item + serialized_hist[80*(i+1):]
                tx_item = ''

        else:
            # scanned past the front: whatever is left belongs before the
            # whole existing history
            serialized_hist = ''.join(itemlist) + tx_item + serialized_hist

        self.batch_list[addr] = serialized_hist
434
435
    def set_spent_bit(self, addr, txi, is_spent, txid=None, index=None, height=None):
        """Mark the history record of prevout *txi* (36 bytes: txid + pos) in
        *addr*'s batched history as spent or unspent.

        When is_spent, the record's flag becomes chr(1) and the spending
        (txid, index, height) is stored in its second 40 bytes with flag
        chr(2); otherwise the flag and the second half are zeroed.
        Stops the server and raises when the prevout is not found, since that
        means the database is inconsistent.
        """
        serialized_hist = self.batch_list[addr]
        l = len(serialized_hist)/80
        for i in range(l):
            item = serialized_hist[80*i:80*(i+1)]
            if item[0:36] == txi:
                if is_spent:
                    new_item = item[0:39] + chr(1) + self.serialize_item(txid, index, height, chr(2))
                else:
                    new_item = item[0:39] + chr(0) + chr(0)*40 
                serialized_hist = serialized_hist[0:80*i] + new_item + serialized_hist[80*(i+1):]
                break
        else:
            # prevout missing: shut down rather than corrupt further
            self.shared.stop()
            hist = self.deserialize(serialized_hist)
            raise BaseException("prevout not found", addr, hist, txi.encode('hex'))

        self.batch_list[addr] = serialized_hist
454
455
    def unset_spent_bit(self, addr, txi):
        """Clear the spent flag of prevout *txi* and restore its
        txio -> address backlink (used when reverting a block)."""
        self.set_spent_bit(addr, txi, False)
        self.batch_txio[txi] = addr
459
460
461     def deserialize_block(self, block):
462         txlist = block.get('tx')
463         tx_hashes = []  # ordered txids
464         txdict = {}     # deserialized tx
465         is_coinbase = True
466         for raw_tx in txlist:
467             tx_hash = hash_encode(Hash(raw_tx.decode('hex')))
468             vds = deserialize.BCDataStream()
469             vds.write(raw_tx.decode('hex'))
470             try:
471                 tx = deserialize.parse_Transaction(vds, is_coinbase)
472             except:
473                 print_log("ERROR: cannot parse", tx_hash)
474                 continue
475             tx_hashes.append(tx_hash)
476             txdict[tx_hash] = tx
477             is_coinbase = False
478         return tx_hashes, txdict
479
480     def get_undo_info(self, height):
481         s = self.db.Get("undo%d" % (height % 100))
482         return eval(s)
483
484     def write_undo_info(self, batch, height, undo_info):
485         if self.is_test or height > self.bitcoind_height - 100:
486             batch.Put("undo%d" % (height % 100), repr(undo_info))
487
    def import_block(self, block, block_hash, block_height, sync, revert=False):
        """Apply (or, with revert=True, undo) one block's effect on the
        address-history database, in a single LevelDB batch.

        block: full block dict from getfullblock (raw tx hex under 'tx').
        block_hash, block_height: identity of the block being (un)applied.
        sync: passed through to the LevelDB write.
        revert: undo the block using its stored undo info instead of applying.
        """
        self.batch_list = {}  # address -> history
        self.batch_txio = {}  # transaction i/o -> address

        block_inputs = []
        block_outputs = []
        addr_to_read = []

        # deserialize transactions
        t0 = time.time()
        tx_hashes, txdict = self.deserialize_block(block)

        t00 = time.time()

        # undo info
        if revert:
            undo_info = self.get_undo_info(block_height)
        else:
            undo_info = {}


        if not revert:
            # read addresses of tx inputs
            for tx in txdict.values():
                for x in tx.get('inputs'):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    block_inputs.append(txi)

            block_inputs.sort()
            for txi in block_inputs:
                try:
                    addr = self.db.Get(txi)
                except KeyError:
                    # the input could come from the same block
                    continue
                except:
                    traceback.print_exc(file=sys.stdout)
                    self.shared.stop()
                    raise

                self.batch_txio[txi] = addr
                addr_to_read.append(addr)

        else:
            # reverting: collect this block's outputs (to delete) and the
            # previous addresses of its inputs (from the undo info)
            for txid, tx in txdict.items():
                for x in tx.get('outputs'):
                    txo = (txid + int_to_hex(x.get('index'), 4)).decode('hex')
                    block_outputs.append(txo)
                    addr_to_read.append( x.get('address') )

                undo = undo_info.get(txid)
                for i, x in enumerate(tx.get('inputs')):
                    addr = undo['prev_addr'][i]
                    addr_to_read.append(addr)




        # read histories of addresses
        for txid, tx in txdict.items():
            for x in tx.get('outputs'):
                addr_to_read.append(x.get('address'))

        addr_to_read.sort()
        for addr in addr_to_read:
            try:
                self.batch_list[addr] = self.db.Get(addr)
            except KeyError:
                # address not seen before: start with an empty history
                self.batch_list[addr] = ''
            except:
                traceback.print_exc(file=sys.stdout)
                self.shared.stop()
                raise


        # process
        t1 = time.time()

        if revert:
            # undo transactions in reverse order
            tx_hashes = tx_hashes[::-1]


        for txid in tx_hashes:  # must be ordered
            tx = txdict[txid]
            if not revert:

                undo = { 'prev_addr':[] } # contains the list of pruned items for each address in the tx; also, 'prev_addr' is a list of prev addresses

                prev_addr = []
                for i, x in enumerate(tx.get('inputs')):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    addr = self.batch_txio[txi]

                    # add redeem item to the history.
                    # add it right next to the input txi? this will break history sorting, but it's ok if I neglect tx inputs during search
                    self.set_spent_bit(addr, txi, True, txid, i, block_height)

                    # when I prune, prune a pair
                    self.prune_history(addr, undo)
                    prev_addr.append(addr)

                undo['prev_addr'] = prev_addr 

                # here I add only the outputs to history; maybe I want to add inputs too (that's in the other loop)
                for x in tx.get('outputs'):
                    addr = x.get('address')
                    self.add_to_history(addr, txid, x.get('index'), block_height)
                    self.prune_history(addr, undo)  # prune here because we increased the length of the history

                undo_info[txid] = undo

            else:

                undo = undo_info.pop(txid)

                for x in tx.get('outputs'):
                    addr = x.get('address')
                    self.revert_prune_history(addr, undo)
                    self.revert_add_to_history(addr, txid, x.get('index'), block_height)

                prev_addr = undo.pop('prev_addr')
                for i, x in enumerate(tx.get('inputs')):
                    addr = prev_addr[i]
                    self.revert_prune_history(addr, undo)
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    self.unset_spent_bit(addr, txi)

                # every per-tx undo entry must have been consumed
                assert undo == {}

        if revert: 
            assert undo_info == {}


        # write
        max_len = 0
        max_addr = ''
        t2 = time.time()

        batch = leveldb.WriteBatch()
        for addr, serialized_hist in self.batch_list.items():
            batch.Put(addr, serialized_hist)
            l = len(serialized_hist)/80
            if l > max_len:
                max_len = l
                max_addr = addr

        if not revert:
            # add new created outputs
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete spent inputs
            for txi in block_inputs:
                batch.Delete(txi)
            # add undo info
            self.write_undo_info(batch, block_height, undo_info)
        else:
            # restore spent inputs
            for txio, addr in self.batch_txio.items():
                # print "restoring spent input", repr(txio)
                batch.Put(txio, addr)
            # delete spent outputs
            for txo in block_outputs:
                batch.Delete(txo)

        # add the max
        batch.Put('height', self.serialize([(block_hash, block_height, self.db_version)]))

        # actual write
        self.db.Write(batch, sync=sync)

        t3 = time.time()
        if t3 - t0 > 10 and not sync:
            print_log("block", block_height,
                      "parse:%0.2f " % (t00 - t0),
                      "read:%0.2f " % (t1 - t00),
                      "proc:%.2f " % (t2-t1),
                      "write:%.2f " % (t3-t2),
                      "max:", max_len, max_addr)

        # histories changed: drop stale cache entries and notify watchers
        for addr in self.batch_list.keys():
            self.invalidate_cache(addr)
671
672     def add_request(self, session, request):
673         # see if we can get if from cache. if not, add to queue
674         if self.process(session, request, cache_only=True) == -1:
675             self.queue.put((session, request))
676
677
678
679
    def process(self, session, request, cache_only=False):
        """Dispatch one electrum client request and push the response.

        request: JSON-RPC style dict with 'id', 'method' and optional 'params'.
        cache_only: when set, answer only from caches; returns -1 on a cache
        miss so add_request can queue the request for full processing.
        """
        message_id = request['id']
        method = request['method']
        params = request.get('params', [])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            with self.watch_lock:
                if session not in self.watch_blocks:
                    self.watch_blocks.append(session)
            result = self.height

        elif method == 'blockchain.headers.subscribe':
            with self.watch_lock:
                if session not in self.watch_headers:
                    self.watch_headers.append(session)
            result = self.header

        elif method == 'blockchain.address.subscribe':
            try:
                address = params[0]
                result = self.get_status(address, cache_only)
                # register the session for notifications on this address
                with self.watch_lock:
                    l = self.watched_addresses.get(address)
                    if l is None:
                        self.watched_addresses[address] = [session]
                    elif session not in l:
                        l.append(session)

            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)


        elif method == 'blockchain.address.get_history':
            try:
                address = params[0]
                result = self.get_history(address, cache_only)
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.block.get_header':
            if cache_only:
                result = -1
            else:
                try:
                    height = params[0]
                    result = self.get_header(height)
                except BaseException, e:
                    error = str(e) + ': %d' % height
                    print_log("error:", error)

        elif method == 'blockchain.block.get_chunk':
            if cache_only:
                result = -1
            else:
                try:
                    index = params[0]
                    result = self.get_chunk(index)
                except BaseException, e:
                    error = str(e) + ': %d' % index
                    print_log("error:", error)

        elif method == 'blockchain.transaction.broadcast':
            try:
                txo = self.bitcoind('sendrawtransaction', params)
                print_log("sent tx:", txo)
                result = txo
            except BaseException, e:
                result = str(e)  # do not send an error
                print_log("error:", result, params)

        elif method == 'blockchain.transaction.get_merkle':
            if cache_only:
                result = -1
            else:
                try:
                    tx_hash = params[0]
                    tx_height = params[1]
                    result = self.get_merkle(tx_hash, tx_height)
                except BaseException, e:
                    error = str(e) + ': ' + repr(params)
                    print_log("get_merkle error:", error)

        elif method == 'blockchain.transaction.get':
            try:
                tx_hash = params[0]
                result = self.bitcoind('getrawtransaction', [tx_hash, 0])
            except BaseException, e:
                error = str(e) + ': ' + repr(params)
                print_log("tx get error:", error)

        else:
            error = "unknown method:%s" % method

        if cache_only and result == -1:
            return -1

        if error:
            self.push_response(session, {'id': message_id, 'error': error})
        elif result != '':
            self.push_response(session, {'id': message_id, 'result': result})
785
786
787     def getfullblock(self, block_hash):
788         block = self.bitcoind('getblock', [block_hash])
789
790         rawtxreq = []
791         i = 0
792         for txid in block['tx']:
793             rawtxreq.append({
794                 "method": "getrawtransaction",
795                 "params": [txid],
796                 "id": i,
797             })
798             i += 1
799
800         postdata = dumps(rawtxreq)
801         try:
802             respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
803         except:
804             traceback.print_exc(file=sys.stdout)
805             self.shared.stop()
806
807         r = loads(respdata)
808         rawtxdata = []
809         for ir in r:
810             if ir['error'] is not None:
811                 self.shared.stop()
812                 print_log("Error: make sure you run bitcoind with txindex=1; use -reindex if needed.")
813                 raise BaseException(ir['error'])
814             rawtxdata.append(ir['result'])
815         block['tx'] = rawtxdata
816         return block
817
    def catch_up(self, sync=True):
        """Import blocks until our tip matches bitcoind's, handling reorgs by
        reverting blocks until the chains reconnect.

        Sets up_to_date when finished and leaves self.header at the tip.
        sync: passed through to the per-block LevelDB writes.
        """
        t1 = time.time()

        while not self.shared.stopped():
            # are we done yet?
            info = self.bitcoind('getinfo')
            self.bitcoind_height = info.get('blocks')
            bitcoind_block_hash = self.bitcoind('getblockhash', [self.bitcoind_height])
            if self.last_hash == bitcoind_block_hash:
                self.up_to_date = True
                break

            # not done..
            self.up_to_date = False
            next_block_hash = self.bitcoind('getblockhash', [self.height + 1])
            next_block = self.getfullblock(next_block_hash)

            # fixme: this is unsafe, if we revert when the undo info is not yet written
            revert = (random.randint(1, 100) == 1) if self.is_test else False

            if (next_block.get('previousblockhash') == self.last_hash) and not revert:

                # next block extends our chain: apply it
                self.import_block(next_block, next_block_hash, self.height+1, sync)
                self.height = self.height + 1
                self.write_header(self.block2header(next_block), sync)
                self.last_hash = next_block_hash

                if self.height % 100 == 0 and not sync:
                    t2 = time.time()
                    print_log("catch_up: block %d (%.3fs)" % (self.height, t2 - t1))
                    t1 = t2

            else:
                # revert current block
                block = self.getfullblock(self.last_hash)
                print_log("blockchain reorg", self.height, block.get('previousblockhash'), self.last_hash)
                self.import_block(block, self.last_hash, self.height, sync, revert=True)
                self.pop_header()
                self.flush_headers()

                self.height -= 1

                # read previous header from disk
                self.header = self.read_header(self.height)
                self.last_hash = self.hash_header(self.header)

        self.header = self.block2header(self.bitcoind('getblock', [self.last_hash]))
865
866
867     def memorypool_update(self):
868         mempool_hashes = set(self.bitcoind('getrawmempool'))
869         touched_addresses = set([])
870
871         for tx_hash in mempool_hashes:
872             if tx_hash in self.mempool_hashes:
873                 continue
874
875             tx = self.get_mempool_transaction(tx_hash)
876             if not tx:
877                 continue
878
879             mpa = self.mempool_addresses.get(tx_hash, [])
880             for x in tx.get('inputs'):
881                 # we assume that the input address can be parsed by deserialize(); this is true for Electrum transactions
882                 addr = x.get('address')
883                 if addr and addr not in mpa:
884                     mpa.append(addr)
885                     touched_addresses.add(addr)
886
887             for x in tx.get('outputs'):
888                 addr = x.get('address')
889                 if addr and addr not in mpa:
890                     mpa.append(addr)
891                     touched_addresses.add(addr)
892
893             self.mempool_addresses[tx_hash] = mpa
894             self.mempool_hashes.add(tx_hash)
895
896         # remove older entries from mempool_hashes
897         self.mempool_hashes = mempool_hashes
898
899         # remove deprecated entries from mempool_addresses
900         for tx_hash, addresses in self.mempool_addresses.items():
901             if tx_hash not in self.mempool_hashes:
902                 self.mempool_addresses.pop(tx_hash)
903                 for addr in addresses:
904                     touched_addresses.add(addr)
905
906         # rebuild mempool histories
907         new_mempool_hist = {}
908         for tx_hash, addresses in self.mempool_addresses.items():
909             for addr in addresses:
910                 h = new_mempool_hist.get(addr, [])
911                 if tx_hash not in h:
912                     h.append(tx_hash)
913                 new_mempool_hist[addr] = h
914
915         with self.mempool_lock:
916             self.mempool_hist = new_mempool_hist
917
918         # invalidate cache for touched addresses
919         for addr in touched_addresses:
920             self.invalidate_cache(addr)
921
922
923     def invalidate_cache(self, address):
924         with self.cache_lock:
925             if address in self.history_cache:
926                 print_log("cache: invalidating", address)
927                 self.history_cache.pop(address)
928
929         with self.watch_lock:
930             sessions = self.watched_addresses.get(address)
931
932         if sessions:
933             # TODO: update cache here. if new value equals cached value, do not send notification
934             self.address_queue.put((address,sessions))
935
936     def main_iteration(self):
937         if self.shared.stopped():
938             print_log("blockchain processor terminating")
939             return
940
941         with self.dblock:
942             t1 = time.time()
943             self.catch_up()
944             t2 = time.time()
945
946         self.memorypool_update()
947
948         if self.sent_height != self.height:
949             self.sent_height = self.height
950             for session in self.watch_blocks:
951                 self.push_response(session, {
952                         'id': None,
953                         'method': 'blockchain.numblocks.subscribe',
954                         'params': [self.height],
955                         })
956
957         if self.sent_header != self.header:
958             print_log("blockchain: %d (%.3fs)" % (self.height, t2 - t1))
959             self.sent_header = self.header
960             for session in self.watch_headers:
961                 self.push_response(session, {
962                         'id': None,
963                         'method': 'blockchain.headers.subscribe',
964                         'params': [self.header],
965                         })
966
967         while True:
968             try:
969                 addr, sessions = self.address_queue.get(False)
970             except:
971                 break
972
973             status = self.get_status(addr)
974             for session in sessions:
975                 self.push_response(session, {
976                         'id': None,
977                         'method': 'blockchain.address.subscribe',
978                         'params': [addr, status],
979                         })
980
981         if not self.shared.stopped():
982             threading.Timer(10, self.main_iteration).start()
983         else:
984             print_log("blockchain processor terminating")