# backends/bitcoind/blockchain_processor.py
import ast
import hashlib
from json import dumps, loads
import leveldb
import os
from Queue import Queue
import random
import sys
import time
import threading
import traceback
import urllib

from backends.bitcoind import deserialize
from processor import Processor, print_log
from utils import *


class BlockchainProcessor(Processor):

    def __init__(self, config, shared):
        Processor.__init__(self)

        self.shared = shared
        self.config = config
        self.up_to_date = False
        self.watched_addresses = []
        self.history_cache = {}
        self.chunk_cache = {}
        self.cache_lock = threading.Lock()
        self.headers_data = ''

        self.mempool_addresses = {}
        self.mempool_hist = {}
        self.mempool_hashes = []
        self.mempool_lock = threading.Lock()

        self.address_queue = Queue()
        self.dbpath = config.get('leveldb', 'path')
        self.pruning_limit = config.getint('leveldb', 'pruning_limit')
        self.db_version = 1  # increase this when the database needs to be updated

        self.dblock = threading.Lock()
        try:
            self.db = leveldb.LevelDB(self.dbpath, paranoid_checks=True)
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (
            config.get('bitcoind', 'user'),
            config.get('bitcoind', 'password'),
            config.get('bitcoind', 'host'),
            config.get('bitcoind', 'port'))

        while True:
            try:
                self.bitcoind('getinfo')
                break
            except:
                print_log('cannot contact bitcoind...')
                time.sleep(5)
                continue

        self.height = 0
        self.is_test = False
        self.sent_height = 0
        self.sent_header = None

        try:
            hist = self.deserialize(self.db.Get('height'))
            self.last_hash, self.height, db_version = hist[0]
            print_log("Database version", db_version)
            print_log("Blockchain height", self.height)
        except:
            traceback.print_exc(file=sys.stdout)
            print_log('initializing database')
            self.height = 0
            self.last_hash = '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
            db_version = self.db_version

        # check version
        if self.db_version != db_version:
            print_log("Your database '%s' is deprecated. Please create a new database" % self.dbpath)
            self.shared.stop()
            return

        # catch up the headers file
        self.init_headers(self.height)

        threading.Timer(0, lambda: self.catch_up(sync=False)).start()
        while not shared.stopped() and not self.up_to_date:
            try:
                time.sleep(1)
            except:
                print_log("keyboard interrupt: stopping threads")
                shared.stop()
                sys.exit(0)

        print_log("Blockchain is up to date.")
        self.memorypool_update()
        print_log("Memory pool initialized.")

        threading.Timer(10, self.main_iteration).start()

    def bitcoind(self, method, params=[]):
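        # send a single synchronous JSON-RPC request to bitcoind and return
        # its result; an error in the response raises an exception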
        postdata = dumps({"method": method, 'params': params, 'id': 'jsonrpc'})
        try:
            respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()
            raise  # respdata is undefined here; do not fall through

        r = loads(respdata)
        if r['error'] is not None:
            raise BaseException(r['error'])
        return r.get('result')

    def serialize(self, h):
        s = ''
        for txid, txpos, height in h:
            s += self.serialize_item(txid, txpos, height)
        return s

    def serialize_item(self, txid, txpos, height, spent=chr(0)):
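        # a history item is a fixed 40-byte record: txid (32 bytes) +
        # tx position (4 bytes) + height (3 bytes) + spent flag (1 byte).
        # chr(0) = unspent, chr(1) = spent; a spent output is followed by a
        # second 40-byte record (flag chr(2)) describing the redeeming
        # input, so every stored slot is 80 bytes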
        s = (txid + int_to_hex(txpos, 4) + int_to_hex(height, 3)).decode('hex') + spent
        return s

    def deserialize_item(self, s):
        txid = s[0:32].encode('hex')
        txpos = int(rev_hex(s[32:36].encode('hex')), 16)
        height = int(rev_hex(s[36:39].encode('hex')), 16)
        spent = s[39:40]
        return (txid, txpos, height, spent)

    def deserialize(self, s):
        h = []
        while s:
            txid, txpos, height, spent = self.deserialize_item(s[0:40])
            h.append((txid, txpos, height))
            if spent == chr(1):
                txid, txpos, height, spent = self.deserialize_item(s[40:80])
                h.append((txid, txpos, height))
            s = s[80:]
        return h

    def block2header(self, b):
        return {
            "block_height": b.get('height'),
            "version": b.get('version'),
            "prev_block_hash": b.get('previousblockhash'),
            "merkle_root": b.get('merkleroot'),
            "timestamp": b.get('time'),
            "bits": int(b.get('bits'), 16),
            "nonce": b.get('nonce'),
        }

    def get_header(self, height):
        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        return self.block2header(b)

    def init_headers(self, db_height):
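        # 'blockchain_headers' is a flat file of raw 80-byte block headers;
        # the header for height h lives at byte offset h*80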
        self.chunk_cache = {}
        self.headers_filename = os.path.join(self.dbpath, 'blockchain_headers')

        if os.path.exists(self.headers_filename):
            height = os.path.getsize(self.headers_filename)/80 - 1  # the current height
            if height > 0:
                prev_hash = self.hash_header(self.read_header(height))
            else:
                prev_hash = None
        else:
            open(self.headers_filename, 'wb').close()
            prev_hash = None
            height = -1

        if height < db_height:
            print_log("catching up missing headers:", height, db_height)

        try:
            while height < db_height:
                height = height + 1
                header = self.get_header(height)
                if height > 1:
                    assert prev_hash == header.get('prev_block_hash')
                self.write_header(header, sync=False)
                prev_hash = self.hash_header(header)
                if (height % 1000) == 0:
                    print_log("headers file:", height)
        except KeyboardInterrupt:
            self.flush_headers()
            sys.exit()

        self.flush_headers()

    def hash_header(self, header):
        return rev_hex(Hash(header_to_string(header).decode('hex')).encode('hex'))

    def read_header(self, block_height):
        if os.path.exists(self.headers_filename):
            with open(self.headers_filename, 'rb') as f:
                f.seek(block_height * 80)
                h = f.read(80)
            if len(h) == 80:
                h = header_from_string(h)
                return h

    def read_chunk(self, index):
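        # one chunk is a difficulty-retarget period of 2016 headers,
        # returned hex-encoded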
        with open(self.headers_filename, 'rb') as f:
            f.seek(index*2016*80)
            chunk = f.read(2016*80)
        return chunk.encode('hex')

    def write_header(self, header, sync=True):
        if not self.headers_data:
            self.headers_offset = header.get('block_height')

        self.headers_data += header_to_string(header).decode('hex')
        if sync or len(self.headers_data) > 40*100:
            self.flush_headers()

        with self.cache_lock:
            chunk_index = header.get('block_height')/2016
            if self.chunk_cache.get(chunk_index):
                self.chunk_cache.pop(chunk_index)

    def pop_header(self):
        # we need to do this only if we have not flushed
        if self.headers_data:
            self.headers_data = self.headers_data[:-80]  # one header is 80 bytes

    def flush_headers(self):
        if not self.headers_data:
            return
        with open(self.headers_filename, 'rb+') as f:
            f.seek(self.headers_offset*80)
            f.write(self.headers_data)
        self.headers_data = ''

    def get_chunk(self, i):
        # chunks are stored on disk; cache them in memory once they are read
        with self.cache_lock:
            chunk = self.chunk_cache.get(i)
            if not chunk:
                chunk = self.read_chunk(i)
                self.chunk_cache[i] = chunk

        return chunk

    def get_mempool_transaction(self, txid):
        try:
            raw_tx = self.bitcoind('getrawtransaction', [txid, 0])
        except:
            return None

        vds = deserialize.BCDataStream()
        vds.write(raw_tx.decode('hex'))
        try:
            return deserialize.parse_Transaction(vds, is_coinbase=False)
        except:
            print_log("ERROR: cannot parse", txid)
            return None

    def get_history(self, addr, cache_only=False):
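        # look up the address history: try the cache first, then the
        # database, then merge in unconfirmed mempool transactions
        # (reported with height 0)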
        with self.cache_lock:
            hist = self.history_cache.get(addr)
        if hist is not None:
            return hist
        if cache_only:
            return -1

        with self.dblock:
            try:
                hist = self.deserialize(self.db.Get(addr))
                is_known = True
            except:
                hist = []
                is_known = False

        # sort by height; in the database, redeeming transactions are stored
        # next to the corresponding txout, not in chronological order
        hist.sort(key=lambda tup: tup[2])

        # add memory pool
        with self.mempool_lock:
            for txid in self.mempool_hist.get(addr, []):
                hist.append((txid, 0, 0))

        # uniqueness
        hist = set(map(lambda x: (x[0], x[2]), hist))

        # convert to dict
        hist = map(lambda x: {'tx_hash': x[0], 'height': x[1]}, hist)

        # add something to distinguish between unused and empty addresses
        if hist == [] and is_known:
            hist = ['*']

        with self.cache_lock:
            self.history_cache[addr] = hist
        return hist

    def get_status(self, addr, cache_only=False):
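        # per the Electrum protocol, the status of an address is the sha256
        # hash of the concatenated 'txid:height:' strings of its history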
        tx_points = self.get_history(addr, cache_only)
        if cache_only and tx_points == -1:
            return -1

        if not tx_points:
            return None
        if tx_points == ['*']:
            return '*'
        status = ''
        for tx in tx_points:
            status += tx.get('tx_hash') + ':%d:' % tx.get('height')
        return hashlib.sha256(status).digest().encode('hex')

    def get_merkle(self, tx_hash, height):
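        # build the merkle branch that links tx_hash to the block's merkle
        # root, so SPV clients can verify the transaction's inclusion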
        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        tx_list = b.get('tx')
        tx_pos = tx_list.index(tx_hash)

        merkle = map(hash_decode, tx_list)
        target_hash = hash_decode(tx_hash)
        s = []
        while len(merkle) != 1:
            if len(merkle) % 2:
                merkle.append(merkle[-1])
            n = []
            while merkle:
                new_hash = Hash(merkle[0] + merkle[1])
                if merkle[0] == target_hash:
                    s.append(hash_encode(merkle[1]))
                    target_hash = new_hash
                elif merkle[1] == target_hash:
                    s.append(hash_encode(merkle[0]))
                    target_hash = new_hash
                n.append(new_hash)
                merkle = merkle[2:]
            merkle = n

        return {"block_height": height, "merkle": s, "pos": tx_pos}

    def add_to_history(self, addr, tx_hash, tx_pos, tx_height):
        # keep the history sorted: walk it backwards and splice the new
        # 80-byte slot in after the last item whose height is <= tx_height
        s = self.serialize_item(tx_hash, tx_pos, tx_height) + 40*chr(0)
        assert len(s) == 80

        serialized_hist = self.batch_list[addr]

        l = len(serialized_hist)/80
        for i in range(l-1, -1, -1):
            item = serialized_hist[80*i:80*(i+1)]
            item_height = int(rev_hex(item[36:39].encode('hex')), 16)
            if item_height <= tx_height:
                serialized_hist = serialized_hist[0:80*(i+1)] + s + serialized_hist[80*(i+1):]
                break
        else:
            serialized_hist = s + serialized_hist

        self.batch_list[addr] = serialized_hist

        # backlink: remember which address owns this outpoint, so that
        # inputs spending it can be resolved later
        txo = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')
        self.batch_txio[txo] = addr

    def revert_add_to_history(self, addr, tx_hash, tx_pos, tx_height):
        serialized_hist = self.batch_list[addr]
        s = self.serialize_item(tx_hash, tx_pos, tx_height) + 40*chr(0)
        if serialized_hist.find(s) == -1:
            raise BaseException("revert_add_to_history: item not found", addr, tx_hash)
        serialized_hist = serialized_hist.replace(s, '')
        self.batch_list[addr] = serialized_hist

    def prune_history(self, addr, undo):
        # remove items whose spent bit is set, but keep at least
        # pruning_limit items; removed slots are saved in `undo` so that
        # pruning can be reverted on a reorg
        if undo.get(addr) is None: undo[addr] = []

        serialized_hist = self.batch_list[addr]
        l = len(serialized_hist)/80
        for i in range(l):
            if len(serialized_hist)/80 < self.pruning_limit: break
            item = serialized_hist[80*i:80*(i+1)]
            if item[39:40] == chr(1):
                assert item[79:80] == chr(2)
                serialized_hist = serialized_hist[0:80*i] + serialized_hist[80*(i+1):]
                undo[addr].append(item)  # items are ordered
        self.batch_list[addr] = serialized_hist

    def revert_prune_history(self, addr, undo):
        # restore removed items
        serialized_hist = self.batch_list[addr]

        if undo.get(addr) is not None:
            itemlist = undo.pop(addr)
        else:
            return

        if not itemlist: return

        l = len(serialized_hist)/80
        tx_item = ''
        for i in range(l-1, -1, -1):
            if tx_item == '':
                if not itemlist:
                    break
                else:
                    tx_item = itemlist.pop(-1)  # get the last element
                    tx_height = int(rev_hex(tx_item[36:39].encode('hex')), 16)

            item = serialized_hist[80*i:80*(i+1)]
            item_height = int(rev_hex(item[36:39].encode('hex')), 16)

            if item_height < tx_height:
                serialized_hist = serialized_hist[0:80*(i+1)] + tx_item + serialized_hist[80*(i+1):]
                tx_item = ''
        else:
            serialized_hist = ''.join(itemlist) + tx_item + serialized_hist

        self.batch_list[addr] = serialized_hist

    def set_spent_bit(self, addr, txi, is_spent, txid=None, index=None, height=None):
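        # mark the output `txi` in addr's history as spent (or unspent);
        # when spending, the redeeming input is stored as the second
        # 40-byte record of the slot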
        serialized_hist = self.batch_list[addr]
        l = len(serialized_hist)/80
        for i in range(l):
            item = serialized_hist[80*i:80*(i+1)]
            if item[0:36] == txi:
                if is_spent:
                    new_item = item[0:39] + chr(1) + self.serialize_item(txid, index, height, chr(2))
                else:
                    new_item = item[0:39] + chr(0) + chr(0)*40
                serialized_hist = serialized_hist[0:80*i] + new_item + serialized_hist[80*(i+1):]
                break
        else:
            self.shared.stop()
            hist = self.deserialize(serialized_hist)
            raise BaseException("prevout not found", addr, hist, txi.encode('hex'))

        self.batch_list[addr] = serialized_hist

    def unset_spent_bit(self, addr, txi):
        self.set_spent_bit(addr, txi, False)
        self.batch_txio[txi] = addr

    def deserialize_block(self, block):
        txlist = block.get('tx')
        tx_hashes = []  # ordered txids
        txdict = {}     # deserialized tx
        is_coinbase = True
        for raw_tx in txlist:
            tx_hash = hash_encode(Hash(raw_tx.decode('hex')))
            vds = deserialize.BCDataStream()
            vds.write(raw_tx.decode('hex'))
            try:
                tx = deserialize.parse_Transaction(vds, is_coinbase)
            except:
                print_log("ERROR: cannot parse", tx_hash)
                continue
            tx_hashes.append(tx_hash)
            txdict[tx_hash] = tx
            is_coinbase = False
        return tx_hashes, txdict

    def get_undo_info(self, height):
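        # undo information is kept only for the last 100 blocks, in a ring
        # of database keys 'undo0'..'undo99'; reorgs deeper than that
        # cannot be reverted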
        s = self.db.Get("undo%d" % (height % 100))
        return ast.literal_eval(s)

    def write_undo_info(self, batch, height, undo_info):
        if self.is_test or height > self.bitcoind_height - 100:
            batch.Put("undo%d" % (height % 100), repr(undo_info))

    def import_block(self, block, block_hash, block_height, sync, revert=False):
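        # import (or revert) one block in four timed phases: deserialize
        # the transactions, read the affected histories from the database,
        # apply the changes in memory, then commit a single write batch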
        self.batch_list = {}  # address -> history
        self.batch_txio = {}  # transaction i/o -> address

        block_inputs = []
        block_outputs = []
        addr_to_read = []

        # deserialize transactions
        t0 = time.time()
        tx_hashes, txdict = self.deserialize_block(block)

        t00 = time.time()

        # undo info
        if revert:
            undo_info = self.get_undo_info(block_height)
        else:
            undo_info = {}

        if not revert:
            # read addresses of tx inputs
            for tx in txdict.values():
                for x in tx.get('inputs'):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    block_inputs.append(txi)

            block_inputs.sort()
            for txi in block_inputs:
                try:
                    addr = self.db.Get(txi)
                except KeyError:
                    # the input could come from the same block
                    continue
                except:
                    traceback.print_exc(file=sys.stdout)
                    self.shared.stop()
                    raise

                self.batch_txio[txi] = addr
                addr_to_read.append(addr)

        else:
            for txid, tx in txdict.items():
                for x in tx.get('outputs'):
                    txo = (txid + int_to_hex(x.get('index'), 4)).decode('hex')
                    block_outputs.append(txo)
                    addr_to_read.append(x.get('address'))

                undo = undo_info.get(txid)
                for i, x in enumerate(tx.get('inputs')):
                    addr = undo['prev_addr'][i]
                    addr_to_read.append(addr)

        # read histories of addresses
        for txid, tx in txdict.items():
            for x in tx.get('outputs'):
                addr_to_read.append(x.get('address'))

        addr_to_read.sort()
        for addr in addr_to_read:
            try:
                self.batch_list[addr] = self.db.Get(addr)
            except KeyError:
                self.batch_list[addr] = ''
            except:
                traceback.print_exc(file=sys.stdout)
                self.shared.stop()
                raise

        # process
        t1 = time.time()

        if revert:
            tx_hashes = tx_hashes[::-1]

        for txid in tx_hashes:  # must be ordered
            tx = txdict[txid]
            if not revert:

                # `undo` records, for each address touched by this tx, the
                # items pruned from its history; 'prev_addr' is the list of
                # previous (input) addresses
                undo = {'prev_addr': []}

                prev_addr = []
                for i, x in enumerate(tx.get('inputs')):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    addr = self.batch_txio[txi]

                    # add the redeem item to the history.
                    # add it right next to the input txi? this will break history sorting,
                    # but it's ok if I neglect tx inputs during search
                    self.set_spent_bit(addr, txi, True, txid, i, block_height)

                    # when I prune, prune a pair
                    self.prune_history(addr, undo)
                    prev_addr.append(addr)

                undo['prev_addr'] = prev_addr

                # here I add only the outputs to history; maybe I want to add inputs too (that's in the other loop)
                for x in tx.get('outputs'):
                    addr = x.get('address')
                    self.add_to_history(addr, txid, x.get('index'), block_height)
                    self.prune_history(addr, undo)  # prune here because we increased the length of the history

                undo_info[txid] = undo

            else:
                undo = undo_info.pop(txid)

                for x in tx.get('outputs'):
                    addr = x.get('address')
                    self.revert_prune_history(addr, undo)
                    self.revert_add_to_history(addr, txid, x.get('index'), block_height)

                prev_addr = undo.pop('prev_addr')
                for i, x in enumerate(tx.get('inputs')):
                    addr = prev_addr[i]
                    self.revert_prune_history(addr, undo)
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    self.unset_spent_bit(addr, txi)

                assert undo == {}

        if revert:
            assert undo_info == {}

        # write
        max_len = 0
        max_addr = ''
        t2 = time.time()

        batch = leveldb.WriteBatch()
        for addr, serialized_hist in self.batch_list.items():
            batch.Put(addr, serialized_hist)
            l = len(serialized_hist)/80
            if l > max_len:
                max_len = l
                max_addr = addr

        if not revert:
            # add newly created outputs
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete spent inputs
            for txi in block_inputs:
                batch.Delete(txi)
            # add undo info
            self.write_undo_info(batch, block_height, undo_info)
        else:
            # restore spent inputs
            for txio, addr in self.batch_txio.items():
                # print "restoring spent input", repr(txio)
                batch.Put(txio, addr)
            # delete spent outputs
            for txo in block_outputs:
                batch.Delete(txo)

        # record the new tip: (block hash, height, db version)
        batch.Put('height', self.serialize([(block_hash, block_height, self.db_version)]))

        # actual write
        self.db.Write(batch, sync=sync)

        t3 = time.time()
        if t3 - t0 > 10 and not sync:
            print_log("block", block_height,
                      "parse:%0.2f " % (t00 - t0),
                      "read:%0.2f " % (t1 - t00),
                      "proc:%.2f " % (t2 - t1),
                      "write:%.2f " % (t3 - t2),
                      "max:", max_len, max_addr)

        for addr in self.batch_list.keys():
            self.invalidate_cache(addr)

    def add_request(self, request):
        # see if we can get it from the cache; if not, add it to the queue
        if self.process(request, cache_only=True) == -1:
            self.queue.put(request)

    def process(self, request, cache_only=False):
        #print "abe process", request

        message_id = request['id']
        method = request['method']
        params = request.get('params', [])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.height

        elif method == 'blockchain.headers.subscribe':
            result = self.header

        elif method == 'blockchain.address.subscribe':
            try:
                address = params[0]
                result = self.get_status(address, cache_only)
                self.watch_address(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.address.unsubscribe':
            try:
                password = params[0]
                address = params[1]
                if password == self.config.get('server', 'password'):
                    self.watched_addresses.remove(address)
                    # print_log('unsubscribed', address)
                    result = "ok"
                else:
                    print_log('incorrect password')
                    result = "authentication error"
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.address.get_history':
            try:
                address = params[0]
                result = self.get_history(address, cache_only)
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.block.get_header':
            if cache_only:
                result = -1
            else:
                try:
                    height = params[0]
                    result = self.get_header(height)
                except BaseException, e:
                    error = str(e) + ': %d' % height
                    print_log("error:", error)

        elif method == 'blockchain.block.get_chunk':
            if cache_only:
                result = -1
            else:
                try:
                    index = params[0]
                    result = self.get_chunk(index)
                except BaseException, e:
                    error = str(e) + ': %d' % index
                    print_log("error:", error)

        elif method == 'blockchain.transaction.broadcast':
            try:
                txo = self.bitcoind('sendrawtransaction', params)
                print_log("sent tx:", txo)
                result = txo
            except BaseException, e:
                result = str(e)  # do not send an error
                print_log("error:", result, params)

        elif method == 'blockchain.transaction.get_merkle':
            if cache_only:
                result = -1
            else:
                try:
                    tx_hash = params[0]
                    tx_height = params[1]
                    result = self.get_merkle(tx_hash, tx_height)
                except BaseException, e:
                    error = str(e) + ': ' + repr(params)
                    print_log("get_merkle error:", error)

        elif method == 'blockchain.transaction.get':
            try:
                tx_hash = params[0]
                result = self.bitcoind('getrawtransaction', [tx_hash, 0])
            except BaseException, e:
                error = str(e) + ': ' + repr(params)
                print_log("tx get error:", error)

        else:
            error = "unknown method: %s" % method

        if cache_only and result == -1:
            return -1

        if error:
            self.push_response({'id': message_id, 'error': error})
        elif result != '':
            self.push_response({'id': message_id, 'result': result})

    def watch_address(self, addr):
        if addr not in self.watched_addresses:
            self.watched_addresses.append(addr)

    def getfullblock(self, block_hash):
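        # fetch the block, then replace its txid list with the raw
        # transactions, fetched in a single batched JSON-RPC request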
        block = self.bitcoind('getblock', [block_hash])

        rawtxreq = []
        for i, txid in enumerate(block['tx']):
            rawtxreq.append({
                "method": "getrawtransaction",
                "params": [txid],
                "id": i,
            })

        postdata = dumps(rawtxreq)
        try:
            respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()
            raise  # respdata is undefined here; do not fall through

        r = loads(respdata)
        rawtxdata = []
        for ir in r:
            if ir['error'] is not None:
                raise BaseException(ir['error'])
            rawtxdata.append(ir['result'])
        block['tx'] = rawtxdata
        return block

    def catch_up(self, sync=True):
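        # import blocks one by one until our tip matches bitcoind's tip;
        # if the next block does not extend our tip, revert one block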
        t1 = time.time()

        while not self.shared.stopped():
            # are we done yet?
            info = self.bitcoind('getinfo')
            self.bitcoind_height = info.get('blocks')
            bitcoind_block_hash = self.bitcoind('getblockhash', [self.bitcoind_height])
            if self.last_hash == bitcoind_block_hash:
                self.up_to_date = True
                break

            # not done..
            self.up_to_date = False
            next_block_hash = self.bitcoind('getblockhash', [self.height + 1])
            next_block = self.getfullblock(next_block_hash)

            # fixme: this is unsafe, if we revert when the undo info is not yet written
            revert = (random.randint(1, 100) == 1) if self.is_test else False

            if (next_block.get('previousblockhash') == self.last_hash) and not revert:

                self.import_block(next_block, next_block_hash, self.height+1, sync)
                self.height = self.height + 1
                self.write_header(self.block2header(next_block), sync)
                self.last_hash = next_block_hash

                if self.height % 100 == 0 and not sync:
                    t2 = time.time()
                    print_log("catch_up: block %d (%.3fs)" % (self.height, t2 - t1))
                    t1 = t2

            else:
                # revert current block
                block = self.getfullblock(self.last_hash)
                print_log("blockchain reorg", self.height, block.get('previousblockhash'), self.last_hash)
                self.import_block(block, self.last_hash, self.height, sync, revert=True)
                self.pop_header()
                self.flush_headers()

                self.height -= 1

                # read previous header from disk
                self.header = self.read_header(self.height)
                self.last_hash = self.hash_header(self.header)

        self.header = self.block2header(self.bitcoind('getblock', [self.last_hash]))

    def memorypool_update(self):
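        # refresh the mempool view: parse newly arrived unconfirmed
        # transactions, drop confirmed or evicted ones, rebuild the
        # per-address mempool histories, and invalidate affected caches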
        mempool_hashes = self.bitcoind('getrawmempool')

        touched_addresses = []
        for tx_hash in mempool_hashes:
            if tx_hash in self.mempool_hashes:
                continue

            tx = self.get_mempool_transaction(tx_hash)
            if not tx:
                continue

            mpa = self.mempool_addresses.get(tx_hash, [])
            for x in tx.get('inputs'):
                # we assume that the input address can be parsed by deserialize();
                # this is true for Electrum transactions
                addr = x.get('address')
                if addr and addr not in mpa:
                    mpa.append(addr)
                    touched_addresses.append(addr)

            for x in tx.get('outputs'):
                addr = x.get('address')
                if addr and addr not in mpa:
                    mpa.append(addr)
                    touched_addresses.append(addr)

            self.mempool_addresses[tx_hash] = mpa
            self.mempool_hashes.append(tx_hash)

        # replace the list with bitcoind's current mempool, dropping
        # transactions that have been confirmed or evicted
        self.mempool_hashes = mempool_hashes

        # remove deprecated entries from mempool_addresses
        for tx_hash, addresses in self.mempool_addresses.items():
            if tx_hash not in self.mempool_hashes:
                self.mempool_addresses.pop(tx_hash)
                for addr in addresses:
                    touched_addresses.append(addr)

        # rebuild mempool histories
        new_mempool_hist = {}
        for tx_hash, addresses in self.mempool_addresses.items():
            for addr in addresses:
                h = new_mempool_hist.get(addr, [])
                if tx_hash not in h:
                    h.append(tx_hash)
                new_mempool_hist[addr] = h

        with self.mempool_lock:
            self.mempool_hist = new_mempool_hist

        # invalidate cache for touched addresses
        for addr in touched_addresses:
            self.invalidate_cache(addr)

    def invalidate_cache(self, address):
        with self.cache_lock:
            if address in self.history_cache:
                print_log("cache: invalidating", address)
                self.history_cache.pop(address)

        if address in self.watched_addresses:
            # TODO: update cache here. if new value equals cached value, do not send notification
            self.address_queue.put(address)

    def main_iteration(self):
        if self.shared.stopped():
            print_log("blockchain processor terminating")
            return

        with self.dblock:
            t1 = time.time()
            self.catch_up()
            t2 = time.time()

        self.memorypool_update()

        if self.sent_height != self.height:
            self.sent_height = self.height
            self.push_response({
                'id': None,
                'method': 'blockchain.numblocks.subscribe',
                'params': [self.height],
            })

        if self.sent_header != self.header:
            print_log("blockchain: %d (%.3fs)" % (self.height, t2 - t1))
            self.sent_header = self.header
            self.push_response({
                'id': None,
                'method': 'blockchain.headers.subscribe',
                'params': [self.header],
            })

        while True:
            try:
                addr = self.address_queue.get(False)
            except:
                break
            if addr in self.watched_addresses:
                status = self.get_status(addr)
                self.push_response({
                    'id': None,
                    'method': 'blockchain.address.subscribe',
                    'params': [addr, status],
                })

        if not self.shared.stopped():
            threading.Timer(10, self.main_iteration).start()
        else:
            print_log("blockchain processor terminating")
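
# Example (a minimal usage sketch, not found in this file): the processor
# is normally constructed by the electrum-server startup code, with a
# ConfigParser-style `config` providing the 'leveldb', 'bitcoind' and
# 'server' sections read above, and a `shared` object providing
# stop()/stopped(); both are defined outside this module.
#
#   chain_proc = BlockchainProcessor(config, shared)
#   chain_proc.add_request({'id': 0,
#                           'method': 'blockchain.numblocks.subscribe',
#                           'params': []})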