Fix memory leak in subscription handling
[electrum-server.git] / backends / bitcoind / blockchain_processor.py
1 import ast
2 import hashlib
3 from json import dumps, loads
4 import leveldb
5 import os
6 from Queue import Queue
7 import random
8 import sys
9 import time
10 import threading
11 import traceback
12 import urllib
13
14 from backends.bitcoind import deserialize
15 from processor import Processor, print_log
16 from utils import *
17
18
19 class BlockchainProcessor(Processor):
20
    def __init__(self, config, shared):
        """Set up the processor: open the LevelDB store, connect to bitcoind,
        catch up headers and blocks, then start the periodic main_iteration.

        Blocks until the chain is up to date (or shared is stopped).
        """
        Processor.__init__(self)

        self.shared = shared
        self.config = config
        self.up_to_date = False

        # subscription state: sessions watching block count / headers,
        # and address -> [sessions] map
        self.watch_lock = threading.Lock()
        self.watch_blocks = []
        self.watch_headers = []
        self.watched_addresses = {}

        # per-address history cache and per-chunk header cache
        self.history_cache = {}
        self.chunk_cache = {}
        self.cache_lock = threading.Lock()
        self.headers_data = ''  # headers buffered in memory, not yet flushed

        # mempool state, guarded by mempool_lock
        self.mempool_addresses = {}
        self.mempool_hist = {}
        self.mempool_hashes = set([])
        self.mempool_lock = threading.Lock()

        self.address_queue = Queue()
        self.dbpath = config.get('leveldb', 'path')
        self.pruning_limit = config.getint('leveldb', 'pruning_limit')
        self.db_version = 1 # increase this when database needs to be updated

        self.dblock = threading.Lock()
        try:
            self.db = leveldb.LevelDB(self.dbpath, paranoid_checks=True)
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (
            config.get('bitcoind', 'user'),
            config.get('bitcoind', 'password'),
            config.get('bitcoind', 'host'),
            config.get('bitcoind', 'port'))

        # wait until bitcoind answers RPC before doing anything else
        while True:
            try:
                self.bitcoind('getinfo')
                break
            except:
                print_log('cannot contact bitcoind...')
                time.sleep(5)
                continue

        self.height = 0
        self.is_test = False
        self.sent_height = 0
        self.sent_header = None

        # restore tip from the db; any failure means a fresh database
        try:
            hist = self.deserialize(self.db.Get('height'))
            self.last_hash, self.height, db_version = hist[0]
            print_log("Database version", self.db_version)
            print_log("Blockchain height", self.height)
        except:
            traceback.print_exc(file=sys.stdout)
            print_log('initializing database')
            self.height = 0
            # mainnet genesis block hash
            self.last_hash = '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
            db_version = self.db_version

        # check version
        if self.db_version != db_version:
            print_log("Your database '%s' is deprecated. Please create a new database"%self.dbpath)
            self.shared.stop()
            return

        # catch_up headers
        self.init_headers(self.height)

        # catch up blocks in a background thread while we poll for completion
        threading.Timer(0, lambda: self.catch_up(sync=False)).start()
        while not shared.stopped() and not self.up_to_date:
            try:
                time.sleep(1)
            except:
                print "keyboard interrupt: stopping threads"
                shared.stop()
                sys.exit(0)

        print_log("Blockchain is up to date.")
        self.memorypool_update()
        print_log("Memory pool initialized.")

        threading.Timer(10, self.main_iteration).start()
110
111     def bitcoind(self, method, params=[]):
112         postdata = dumps({"method": method, 'params': params, 'id': 'jsonrpc'})
113         try:
114             respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
115         except:
116             traceback.print_exc(file=sys.stdout)
117             self.shared.stop()
118
119         r = loads(respdata)
120         if r['error'] is not None:
121             raise BaseException(r['error'])
122         return r.get('result')
123
124     def serialize(self, h):
125         s = ''
126         for txid, txpos, height in h:
127             s += self.serialize_item(txid, txpos, height)
128         return s
129
130     def serialize_item(self, txid, txpos, height, spent=chr(0)):
131         s = (txid + int_to_hex(txpos, 4) + int_to_hex(height, 3)).decode('hex') + spent 
132         return s
133
134     def deserialize_item(self,s):
135         txid = s[0:32].encode('hex')
136         txpos = int(rev_hex(s[32:36].encode('hex')), 16)
137         height = int(rev_hex(s[36:39].encode('hex')), 16)
138         spent = s[39:40]
139         return (txid, txpos, height, spent)
140
141     def deserialize(self, s):
142         h = []
143         while s:
144             txid, txpos, height, spent = self.deserialize_item(s[0:40])
145             h.append((txid, txpos, height))
146             if spent == chr(1):
147                 txid, txpos, height, spent = self.deserialize_item(s[40:80])
148                 h.append((txid, txpos, height))
149             s = s[80:]
150         return h
151
152     def block2header(self, b):
153         return {
154             "block_height": b.get('height'),
155             "version": b.get('version'),
156             "prev_block_hash": b.get('previousblockhash'),
157             "merkle_root": b.get('merkleroot'),
158             "timestamp": b.get('time'),
159             "bits": int(b.get('bits'), 16),
160             "nonce": b.get('nonce'),
161         }
162
163     def get_header(self, height):
164         block_hash = self.bitcoind('getblockhash', [height])
165         b = self.bitcoind('getblock', [block_hash])
166         return self.block2header(b)
167
    def init_headers(self, db_height):
        """Create or extend the flat headers file (80 bytes per header) until
        it covers every block up to *db_height*, fetching missing headers
        from bitcoind. Flushes buffered headers on KeyboardInterrupt."""
        self.chunk_cache = {}
        self.headers_filename = os.path.join(self.dbpath, 'blockchain_headers')

        if os.path.exists(self.headers_filename):
            height = os.path.getsize(self.headers_filename)/80 - 1   # the current height
            if height > 0:
                prev_hash = self.hash_header(self.read_header(height))
            else:
                prev_hash = None
        else:
            # start a fresh, empty headers file
            open(self.headers_filename, 'wb').close()
            prev_hash = None
            height = -1

        if height < db_height:
            print_log("catching up missing headers:", height, db_height)

        try:
            while height < db_height:
                height = height + 1
                header = self.get_header(height)
                if height > 1:
                    # every header must link to the one we just wrote
                    assert prev_hash == header.get('prev_block_hash')
                self.write_header(header, sync=False)
                prev_hash = self.hash_header(header)
                if (height % 1000) == 0:
                    print_log("headers file:", height)
        except KeyboardInterrupt:
            self.flush_headers()
            sys.exit()

        self.flush_headers()
201
202     def hash_header(self, header):
203         return rev_hex(Hash(header_to_string(header).decode('hex')).encode('hex'))
204
205     def read_header(self, block_height):
206         if os.path.exists(self.headers_filename):
207             with open(self.headers_filename, 'rb') as f:
208                 f.seek(block_height * 80)
209                 h = f.read(80)
210             if len(h) == 80:
211                 h = header_from_string(h)
212                 return h
213
214     def read_chunk(self, index):
215         with open(self.headers_filename, 'rb') as f:
216             f.seek(index*2016*80)
217             chunk = f.read(2016*80)
218         return chunk.encode('hex')
219
    def write_header(self, header, sync=True):
        """Append *header* to the in-memory buffer, flush to disk when *sync*
        is set or the buffer is large, and invalidate the affected chunk."""
        if not self.headers_data:
            # remember where this run of buffered headers starts on disk
            self.headers_offset = header.get('block_height')

        self.headers_data += header_to_string(header).decode('hex')
        # NOTE(review): serialized headers are 80 bytes each, so 40*100
        # flushes every 50 headers -- possibly intended as 80*100; harmless.
        if sync or len(self.headers_data) > 40*100:
            self.flush_headers()

        with self.cache_lock:
            # drop the cached chunk containing this height, it is now stale
            chunk_index = header.get('block_height')/2016
            if self.chunk_cache.get(chunk_index):
                self.chunk_cache.pop(chunk_index)
232
233     def pop_header(self):
234         # we need to do this only if we have not flushed
235         if self.headers_data:
236             self.headers_data = self.headers_data[:-40]
237
238     def flush_headers(self):
239         if not self.headers_data:
240             return
241         with open(self.headers_filename, 'rb+') as f:
242             f.seek(self.headers_offset*80)
243             f.write(self.headers_data)
244         self.headers_data = ''
245
246     def get_chunk(self, i):
247         # store them on disk; store the current chunk in memory
248         with self.cache_lock:
249             chunk = self.chunk_cache.get(i)
250             if not chunk:
251                 chunk = self.read_chunk(i)
252                 self.chunk_cache[i] = chunk
253
254         return chunk
255
256     def get_mempool_transaction(self, txid):
257         try:
258             raw_tx = self.bitcoind('getrawtransaction', [txid, 0])
259         except:
260             return None
261
262         vds = deserialize.BCDataStream()
263         vds.write(raw_tx.decode('hex'))
264         try:
265             return deserialize.parse_Transaction(vds, is_coinbase=False)
266         except:
267             print_log("ERROR: cannot parse", txid)
268             return None
269
270     def get_history(self, addr, cache_only=False):
271         with self.cache_lock:
272             hist = self.history_cache.get(addr)
273         if hist is not None:
274             return hist
275         if cache_only:
276             return -1
277
278         with self.dblock:
279             try:
280                 hist = self.deserialize(self.db.Get(addr))
281                 is_known = True
282             except:
283                 hist = []
284                 is_known = False
285
286         # sort history, because redeeming transactions are next to the corresponding txout
287         hist.sort(key=lambda tup: tup[2])
288
289         # add memory pool
290         with self.mempool_lock:
291             for txid in self.mempool_hist.get(addr, []):
292                 hist.append((txid, 0, 0))
293
294         # uniqueness
295         hist = set(map(lambda x: (x[0], x[2]), hist))
296
297         # convert to dict
298         hist = map(lambda x: {'tx_hash': x[0], 'height': x[1]}, hist)
299
300         # add something to distinguish between unused and empty addresses
301         if hist == [] and is_known:
302             hist = ['*']
303
304         with self.cache_lock:
305             self.history_cache[addr] = hist
306         return hist
307
308     def get_status(self, addr, cache_only=False):
309         tx_points = self.get_history(addr, cache_only)
310         if cache_only and tx_points == -1:
311             return -1
312
313         if not tx_points:
314             return None
315         if tx_points == ['*']:
316             return '*'
317         status = ''
318         for tx in tx_points:
319             status += tx.get('tx_hash') + ':%d:' % tx.get('height')
320         return hashlib.sha256(status).digest().encode('hex')
321
    def get_merkle(self, tx_hash, height):
        """Compute the merkle branch proving *tx_hash* belongs to the block
        at *height*.

        Returns {'block_height', 'merkle': [sibling hashes, hex], 'pos':
        index of the tx in the block}.
        """
        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        tx_list = b.get('tx')
        tx_pos = tx_list.index(tx_hash)

        # fold the tx list pairwise up to the root, collecting the sibling
        # of our target hash at each level
        merkle = map(hash_decode, tx_list)
        target_hash = hash_decode(tx_hash)
        s = []
        while len(merkle) != 1:
            if len(merkle) % 2:
                # odd number of hashes: duplicate the last one
                merkle.append(merkle[-1])
            n = []
            while merkle:
                new_hash = Hash(merkle[0] + merkle[1])
                if merkle[0] == target_hash:
                    s.append(hash_encode(merkle[1]))
                    target_hash = new_hash
                elif merkle[1] == target_hash:
                    s.append(hash_encode(merkle[0]))
                    target_hash = new_hash
                n.append(new_hash)
                merkle = merkle[2:]
            merkle = n

        return {"block_height": height, "merkle": s, "pos": tx_pos}
349
350
    def add_to_history(self, addr, tx_hash, tx_pos, tx_height):
        """Insert a new output record into addr's batched history, keeping the
        80-byte records sorted by height, and record the txo backlink."""
        # keep it sorted
        s = self.serialize_item(tx_hash, tx_pos, tx_height) + 40*chr(0)
        assert len(s) == 80

        serialized_hist = self.batch_list[addr]

        # scan from the end for the last record at or below tx_height
        l = len(serialized_hist)/80
        for i in range(l-1, -1, -1):
            item = serialized_hist[80*i:80*(i+1)]
            item_height = int(rev_hex(item[36:39].encode('hex')), 16)
            if item_height <= tx_height:
                serialized_hist = serialized_hist[0:80*(i+1)] + s + serialized_hist[80*(i+1):]
                break
        else:
            # nothing lower found: the new record goes to the front
            serialized_hist = s + serialized_hist

        self.batch_list[addr] = serialized_hist

        # backlink so inputs spending this output can find its address
        txo = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')
        self.batch_txio[txo] = addr
373
374
375
376     def revert_add_to_history(self, addr, tx_hash, tx_pos, tx_height):
377
378         serialized_hist = self.batch_list[addr]
379         s = self.serialize_item(tx_hash, tx_pos, tx_height) + 40*chr(0)
380         if serialized_hist.find(s) == -1: raise
381         serialized_hist = serialized_hist.replace(s, '')
382         self.batch_list[addr] = serialized_hist
383
384
385
    def prune_history(self, addr, undo):
        """Remove fully-spent 80-byte records (flag chr(1), paired with a
        chr(2) redeem record) from addr's batched history until it fits
        pruning_limit, appending each removed record to undo[addr]."""
        # remove items that have bit set to one
        if undo.get(addr) is None: undo[addr] = []

        serialized_hist = self.batch_list[addr]
        l = len(serialized_hist)/80
        for i in range(l):
            # stop as soon as the history is short enough
            if len(serialized_hist)/80 < self.pruning_limit: break
            item = serialized_hist[80*i:80*(i+1)]
            if item[39:40] == chr(1):
                # a spent record always carries its redeem half (flag chr(2))
                assert item[79:80] == chr(2)
                serialized_hist = serialized_hist[0:80*i] + serialized_hist[80*(i+1):]
                # NOTE(review): the string shrinks while i advances, so the
                # record that slides into slot i is not re-examined this pass.
                undo[addr].append(item)  # items are ordered
        self.batch_list[addr] = serialized_hist
400
401
    def revert_prune_history(self, addr, undo):
        """Re-insert the records prune_history removed for *addr* (taken from
        undo[addr], which is in original order), keeping the history
        sorted by height."""
        # restore removed items
        serialized_hist = self.batch_list[addr]

        if undo.get(addr) is not None:
            itemlist = undo.pop(addr)
        else:
            return

        if not itemlist: return

        # walk the existing history backwards, inserting each pruned record
        # (last first) after the first record of lower height
        l = len(serialized_hist)/80
        tx_item = ''
        for i in range(l-1, -1, -1):
            if tx_item == '':
                if not itemlist:
                    break
                else:
                    tx_item = itemlist.pop(-1) # get the last element
                    tx_height = int(rev_hex(tx_item[36:39].encode('hex')), 16)

            item = serialized_hist[80*i:80*(i+1)]
            item_height = int(rev_hex(item[36:39].encode('hex')), 16)

            if item_height < tx_height:
                serialized_hist = serialized_hist[0:80*(i+1)] + tx_item + serialized_hist[80*(i+1):]
                tx_item = ''

        else:
            # walked past the front: whatever is left belongs at the beginning
            serialized_hist = ''.join(itemlist) + tx_item + serialized_hist

        self.batch_list[addr] = serialized_hist
434
435
    def set_spent_bit(self, addr, txi, is_spent, txid=None, index=None, height=None):
        """Mark prevout *txi* in addr's batched history as spent or unspent.

        When spending, the second 40-byte half of the record is filled with
        the redeeming transaction (flag chr(2)); when unspending it is
        zeroed. Stops the server and raises if the prevout is not found.
        """
        serialized_hist = self.batch_list[addr]
        l = len(serialized_hist)/80
        for i in range(l):
            item = serialized_hist[80*i:80*(i+1)]
            if item[0:36] == txi:
                if is_spent:
                    new_item = item[0:39] + chr(1) + self.serialize_item(txid, index, height, chr(2))
                else:
                    new_item = item[0:39] + chr(0) + chr(0)*40
                serialized_hist = serialized_hist[0:80*i] + new_item + serialized_hist[80*(i+1):]
                break
        else:
            # loop fell through: prevout missing from this history -> fatal
            self.shared.stop()
            hist = self.deserialize(serialized_hist)
            raise BaseException("prevout not found", addr, hist, txi.encode('hex'))

        self.batch_list[addr] = serialized_hist
454
455
456     def unset_spent_bit(self, addr, txi):
457         self.set_spent_bit(addr, txi, False)
458         self.batch_txio[txi] = addr
459
460
461     def deserialize_block(self, block):
462         txlist = block.get('tx')
463         tx_hashes = []  # ordered txids
464         txdict = {}     # deserialized tx
465         is_coinbase = True
466         for raw_tx in txlist:
467             tx_hash = hash_encode(Hash(raw_tx.decode('hex')))
468             vds = deserialize.BCDataStream()
469             vds.write(raw_tx.decode('hex'))
470             try:
471                 tx = deserialize.parse_Transaction(vds, is_coinbase)
472             except:
473                 print_log("ERROR: cannot parse", tx_hash)
474                 continue
475             tx_hashes.append(tx_hash)
476             txdict[tx_hash] = tx
477             is_coinbase = False
478         return tx_hashes, txdict
479
480     def get_undo_info(self, height):
481         s = self.db.Get("undo%d" % (height % 100))
482         return eval(s)
483
484     def write_undo_info(self, batch, height, undo_info):
485         if self.is_test or height > self.bitcoind_height - 100:
486             batch.Put("undo%d" % (height % 100), repr(undo_info))
487
    def import_block(self, block, block_hash, block_height, sync, revert=False):
        """Apply (or, with revert=True, undo) one block's effect on the
        address-history database in a single LevelDB batch.

        Phases: parse transactions, load undo info (revert) or prepare it
        (forward), prefetch the affected address histories, mutate them in
        memory, then write everything atomically and invalidate caches.
        """
        self.batch_list = {}  # address -> history
        self.batch_txio = {}  # transaction i/o -> address

        block_inputs = []
        block_outputs = []
        addr_to_read = []

        # deserialize transactions
        t0 = time.time()
        tx_hashes, txdict = self.deserialize_block(block)

        t00 = time.time()

        # undo info
        if revert:
            undo_info = self.get_undo_info(block_height)
        else:
            undo_info = {}


        if not revert:
            # read addresses of tx inputs
            for tx in txdict.values():
                for x in tx.get('inputs'):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    block_inputs.append(txi)

            block_inputs.sort()
            for txi in block_inputs:
                try:
                    addr = self.db.Get(txi)
                except KeyError:
                    # the input could come from the same block
                    continue
                except:
                    traceback.print_exc(file=sys.stdout)
                    self.shared.stop()
                    raise

                self.batch_txio[txi] = addr
                addr_to_read.append(addr)

        else:
            # reverting: the outputs created by this block must be deleted,
            # and the input addresses come from the stored undo info
            for txid, tx in txdict.items():
                for x in tx.get('outputs'):
                    txo = (txid + int_to_hex(x.get('index'), 4)).decode('hex')
                    block_outputs.append(txo)
                    addr_to_read.append( x.get('address') )

                undo = undo_info.get(txid)
                for i, x in enumerate(tx.get('inputs')):
                    addr = undo['prev_addr'][i]
                    addr_to_read.append(addr)


        # read histories of addresses
        for txid, tx in txdict.items():
            for x in tx.get('outputs'):
                addr_to_read.append(x.get('address'))

        addr_to_read.sort()
        for addr in addr_to_read:
            try:
                self.batch_list[addr] = self.db.Get(addr)
            except KeyError:
                # first time we see this address: empty history
                self.batch_list[addr] = ''
            except:
                traceback.print_exc(file=sys.stdout)
                self.shared.stop()
                raise


        # process
        t1 = time.time()

        if revert:
            # undo transactions in reverse order
            tx_hashes = tx_hashes[::-1]


        for txid in tx_hashes:  # must be ordered
            tx = txdict[txid]
            if not revert:

                undo = { 'prev_addr':[] } # contains the list of pruned items for each address in the tx; also, 'prev_addr' is a list of prev addresses

                prev_addr = []
                for i, x in enumerate(tx.get('inputs')):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    addr = self.batch_txio[txi]

                    # add redeem item to the history.
                    # add it right next to the input txi? this will break history sorting, but it's ok if I neglect tx inputs during search
                    self.set_spent_bit(addr, txi, True, txid, i, block_height)

                    # when I prune, prune a pair
                    self.prune_history(addr, undo)
                    prev_addr.append(addr)

                undo['prev_addr'] = prev_addr

                # here I add only the outputs to history; maybe I want to add inputs too (that's in the other loop)
                for x in tx.get('outputs'):
                    addr = x.get('address')
                    self.add_to_history(addr, txid, x.get('index'), block_height)
                    self.prune_history(addr, undo)  # prune here because we increased the length of the history

                undo_info[txid] = undo

            else:

                undo = undo_info.pop(txid)

                for x in tx.get('outputs'):
                    addr = x.get('address')
                    self.revert_prune_history(addr, undo)
                    self.revert_add_to_history(addr, txid, x.get('index'), block_height)

                prev_addr = undo.pop('prev_addr')
                for i, x in enumerate(tx.get('inputs')):
                    addr = prev_addr[i]
                    self.revert_prune_history(addr, undo)
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    self.unset_spent_bit(addr, txi)

                # every undo entry must have been consumed
                assert undo == {}

        if revert:
            assert undo_info == {}


        # write
        max_len = 0
        max_addr = ''
        t2 = time.time()

        batch = leveldb.WriteBatch()
        for addr, serialized_hist in self.batch_list.items():
            batch.Put(addr, serialized_hist)
            # track the longest history, reported in the timing log below
            l = len(serialized_hist)/80
            if l > max_len:
                max_len = l
                max_addr = addr

        if not revert:
            # add new created outputs
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete spent inputs
            for txi in block_inputs:
                batch.Delete(txi)
            # add undo info
            self.write_undo_info(batch, block_height, undo_info)
        else:
            # restore spent inputs
            for txio, addr in self.batch_txio.items():
                # print "restoring spent input", repr(txio)
                batch.Put(txio, addr)
            # delete spent outputs
            for txo in block_outputs:
                batch.Delete(txo)

        # add the max
        batch.Put('height', self.serialize([(block_hash, block_height, self.db_version)]))

        # actual write
        self.db.Write(batch, sync=sync)

        t3 = time.time()
        if t3 - t0 > 10 and not sync:
            print_log("block", block_height,
                      "parse:%0.2f " % (t00 - t0),
                      "read:%0.2f " % (t1 - t00),
                      "proc:%.2f " % (t2-t1),
                      "write:%.2f " % (t3-t2),
                      "max:", max_len, max_addr)

        # every touched address has a stale history cache entry now
        for addr in self.batch_list.keys():
            self.invalidate_cache(addr)
671
672     def add_request(self, session, request):
673         # see if we can get if from cache. if not, add to queue
674         if self.process(session, request, cache_only=True) == -1:
675             self.queue.put((session, request))
676
677
678     def do_subscribe(self, method, params, session):
679         with self.watch_lock:
680             if method == 'blockchain.numblocks.subscribe':
681                 if session not in self.watch_blocks:
682                     self.watch_blocks.append(session)
683
684             elif method == 'blockchain.headers.subscribe':
685                 if session not in self.watch_headers:
686                     self.watch_headers.append(session)
687
688             elif method == 'blockchain.address.subscribe':
689                 address = params[0]
690                 l = self.watched_addresses.get(address)
691                 if l is None:
692                     self.watched_addresses[address] = [session]
693                 elif session not in l:
694                     l.append(session)
695
696
697     def do_unsubscribe(self, method, params, session):
698         with self.watch_lock:
699             if method == 'blockchain.numblocks.subscribe':
700                 if session in self.watch_blocks:
701                     self.watch_blocks.remove(session)
702             elif method == 'blockchain.headers.subscribe':
703                 if session in self.watch_headers:
704                     self.watch_headers.remove(session)
705             elif method == "blockchain.address.subscribe":
706                 addr = params[0]
707                 l = self.watched_addresses.get(addr)
708                 if not l:
709                     return
710                 if session in l:
711                     l.remove(session)
712                 if session in l:
713                     print "error rc!!"
714                     self.shared.stop()
715                 if l == []:
716                     self.watched_addresses.pop(addr)
717
718
    def process(self, session, request, cache_only=False):
        """Dispatch one Electrum JSON-RPC *request* and push the response
        (result or error) to *session*.

        With cache_only=True, returns -1 without responding when the answer
        is not already cached, so the caller can queue the request instead.
        """
        message_id = request['id']
        method = request['method']
        params = request.get('params', [])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.height

        elif method == 'blockchain.headers.subscribe':
            result = self.header

        elif method == 'blockchain.address.subscribe':
            try:
                address = params[0]
                result = self.get_status(address, cache_only)
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.address.get_history':
            try:
                address = params[0]
                result = self.get_history(address, cache_only)
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.block.get_header':
            # headers come from bitcoind and are never answered from cache
            if cache_only:
                result = -1
            else:
                try:
                    height = params[0]
                    result = self.get_header(height)
                except BaseException, e:
                    error = str(e) + ': %d' % height
                    print_log("error:", error)

        elif method == 'blockchain.block.get_chunk':
            if cache_only:
                result = -1
            else:
                try:
                    index = params[0]
                    result = self.get_chunk(index)
                except BaseException, e:
                    error = str(e) + ': %d' % index
                    print_log("error:", error)

        elif method == 'blockchain.transaction.broadcast':
            try:
                txo = self.bitcoind('sendrawtransaction', params)
                print_log("sent tx:", txo)
                result = txo
            except BaseException, e:
                # broadcast failures are reported in the result field, not
                # as a protocol error
                result = str(e)  # do not send an error
                print_log("error:", result, params)

        elif method == 'blockchain.transaction.get_merkle':
            if cache_only:
                result = -1
            else:
                try:
                    tx_hash = params[0]
                    tx_height = params[1]
                    result = self.get_merkle(tx_hash, tx_height)
                except BaseException, e:
                    error = str(e) + ': ' + repr(params)
                    print_log("get_merkle error:", error)

        elif method == 'blockchain.transaction.get':
            try:
                tx_hash = params[0]
                result = self.bitcoind('getrawtransaction', [tx_hash, 0])
            except BaseException, e:
                error = str(e) + ': ' + repr(params)
                print_log("tx get error:", error)

        else:
            error = "unknown method:%s" % method

        if cache_only and result == -1:
            return -1

        if error:
            self.push_response(session, {'id': message_id, 'error': error})
        elif result != '':
            # NOTE(review): an empty-string result is silently dropped here
            self.push_response(session, {'id': message_id, 'result': result})
810
811
812     def getfullblock(self, block_hash):
813         block = self.bitcoind('getblock', [block_hash])
814
815         rawtxreq = []
816         i = 0
817         for txid in block['tx']:
818             rawtxreq.append({
819                 "method": "getrawtransaction",
820                 "params": [txid],
821                 "id": i,
822             })
823             i += 1
824
825         postdata = dumps(rawtxreq)
826         try:
827             respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
828         except:
829             traceback.print_exc(file=sys.stdout)
830             self.shared.stop()
831
832         r = loads(respdata)
833         rawtxdata = []
834         for ir in r:
835             if ir['error'] is not None:
836                 self.shared.stop()
837                 print_log("Error: make sure you run bitcoind with txindex=1; use -reindex if needed.")
838                 raise BaseException(ir['error'])
839             rawtxdata.append(ir['result'])
840         block['tx'] = rawtxdata
841         return block
842
    def catch_up(self, sync=True):
        """Import blocks from bitcoind until our tip matches bitcoind's tip,
        reverting our top block whenever the next block does not link to it
        (blockchain reorg). Sets self.up_to_date and self.header."""
        t1 = time.time()

        while not self.shared.stopped():
            # are we done yet?
            info = self.bitcoind('getinfo')
            self.bitcoind_height = info.get('blocks')
            bitcoind_block_hash = self.bitcoind('getblockhash', [self.bitcoind_height])
            if self.last_hash == bitcoind_block_hash:
                self.up_to_date = True
                break

            # not done..
            self.up_to_date = False
            next_block_hash = self.bitcoind('getblockhash', [self.height + 1])
            next_block = self.getfullblock(next_block_hash)

            # fixme: this is unsafe, if we revert when the undo info is not yet written
            revert = (random.randint(1, 100) == 1) if self.is_test else False

            if (next_block.get('previousblockhash') == self.last_hash) and not revert:

                # next block extends our chain: import it and advance the tip
                self.import_block(next_block, next_block_hash, self.height+1, sync)
                self.height = self.height + 1
                self.write_header(self.block2header(next_block), sync)
                self.last_hash = next_block_hash

                if self.height % 100 == 0 and not sync:
                    t2 = time.time()
                    print_log("catch_up: block %d (%.3fs)" % (self.height, t2 - t1))
                    t1 = t2

            else:
                # revert current block
                block = self.getfullblock(self.last_hash)
                print_log("blockchain reorg", self.height, block.get('previousblockhash'), self.last_hash)
                self.import_block(block, self.last_hash, self.height, sync, revert=True)
                self.pop_header()
                self.flush_headers()

                self.height -= 1

                # read previous header from disk
                self.header = self.read_header(self.height)
                self.last_hash = self.hash_header(self.header)

        self.header = self.block2header(self.bitcoind('getblock', [self.last_hash]))
890
891
    def memorypool_update(self):
        """Synchronize the in-memory mempool view with bitcoind's mempool.

        Fetches the current mempool txid set, records the addresses touched by
        any new transactions, drops entries for transactions that left the
        mempool, rebuilds the per-address mempool history, and invalidates the
        history cache for every touched address.
        """
        mempool_hashes = set(self.bitcoind('getrawmempool'))
        # Addresses whose mempool view changed in this pass; their cached
        # histories are invalidated at the end.
        touched_addresses = set([])

        for tx_hash in mempool_hashes:
            # Skip transactions we already processed in a previous pass.
            if tx_hash in self.mempool_hashes:
                continue

            tx = self.get_mempool_transaction(tx_hash)
            if not tx:
                continue

            # Collect every input and output address of the new transaction.
            mpa = self.mempool_addresses.get(tx_hash, [])
            for x in tx.get('inputs'):
                # we assume that the input address can be parsed by deserialize(); this is true for Electrum transactions
                addr = x.get('address')
                if addr and addr not in mpa:
                    mpa.append(addr)
                    touched_addresses.add(addr)

            for x in tx.get('outputs'):
                addr = x.get('address')
                if addr and addr not in mpa:
                    mpa.append(addr)
                    touched_addresses.add(addr)

            self.mempool_addresses[tx_hash] = mpa
            self.mempool_hashes.add(tx_hash)

        # remove older entries from mempool_hashes
        self.mempool_hashes = mempool_hashes

        # remove deprecated entries from mempool_addresses
        # NOTE: popping while iterating .items() is safe here only because
        # Python 2's .items() returns a list copy.
        # NOTE(review): mempool_addresses is mutated without mempool_lock;
        # looks intentional since only this thread writes it — confirm.
        for tx_hash, addresses in self.mempool_addresses.items():
            if tx_hash not in self.mempool_hashes:
                self.mempool_addresses.pop(tx_hash)
                for addr in addresses:
                    touched_addresses.add(addr)

        # rebuild mempool histories
        new_mempool_hist = {}
        for tx_hash, addresses in self.mempool_addresses.items():
            for addr in addresses:
                h = new_mempool_hist.get(addr, [])
                if tx_hash not in h:
                    h.append(tx_hash)
                new_mempool_hist[addr] = h

        # Swap in the rebuilt histories atomically with respect to readers
        # holding mempool_lock.
        with self.mempool_lock:
            self.mempool_hist = new_mempool_hist

        # invalidate cache for touched addresses
        for addr in touched_addresses:
            self.invalidate_cache(addr)
947
948     def invalidate_cache(self, address):
949         with self.cache_lock:
950             if address in self.history_cache:
951                 print_log("cache: invalidating", address)
952                 self.history_cache.pop(address)
953
954         with self.watch_lock:
955             sessions = self.watched_addresses.get(address)
956
957         if sessions:
958             # TODO: update cache here. if new value equals cached value, do not send notification
959             self.address_queue.put((address,sessions))
960
    def main_iteration(self):
        """One pass of the processor's periodic loop.

        Catches up with bitcoind, refreshes the mempool, pushes new-block /
        new-header notifications to subscribed sessions, drains the
        address_queue to push address-status notifications, then re-arms
        itself on a 10-second Timer unless the server is stopping.
        """
        if self.shared.stopped():
            print_log("blockchain processor terminating")
            return

        # Serialize database access with other users of self.db.
        with self.dblock:
            t1 = time.time()
            self.catch_up()
            t2 = time.time()

        self.memorypool_update()

        # Notify blockheight subscribers only when the height changed.
        if self.sent_height != self.height:
            self.sent_height = self.height
            for session in self.watch_blocks:
                self.push_response(session, {
                        'id': None,
                        'method': 'blockchain.numblocks.subscribe',
                        'params': [self.height],
                        })

        # Notify header subscribers only when the tip header changed.
        if self.sent_header != self.header:
            print_log("blockchain: %d (%.3fs)" % (self.height, t2 - t1))
            self.sent_header = self.header
            for session in self.watch_headers:
                self.push_response(session, {
                        'id': None,
                        'method': 'blockchain.headers.subscribe',
                        'params': [self.header],
                        })

        # Drain the queue of (address, sessions) pairs produced by
        # invalidate_cache and push each watcher the current status.
        while True:
            try:
                addr, sessions = self.address_queue.get(False)
            except:
                # Non-blocking get raises Queue.Empty when drained;
                # the bare except also swallows anything else — intentional?
                break

            status = self.get_status(addr)
            for session in sessions:
                self.push_response(session, {
                        'id': None,
                        'method': 'blockchain.address.subscribe',
                        'params': [addr, status],
                        })

        # Re-arm: each iteration schedules the next one on a fresh Timer
        # thread rather than looping in place.
        if not self.shared.stopped():
            threading.Timer(10, self.main_iteration).start()
        else:
            print_log("blockchain processor terminating")