catch exception raised when mempool has changed
[electrum-server.git] backends/bitcoind/blockchain_processor.py
from json import dumps, loads
import leveldb, urllib
import deserialize
import ast, time, threading, hashlib
from Queue import Queue
import traceback, sys, os, random


from util import Hash, hash_encode, hash_decode, rev_hex, int_to_hex
from util import bc_address_to_hash_160, hash_160_to_bc_address, header_to_string, header_from_string
from processor import Processor, print_log

class BlockchainProcessor(Processor):

    def __init__(self, config, shared):
        Processor.__init__(self)

        self.shared = shared
        self.config = config
        self.up_to_date = False
        self.watched_addresses = []
        self.history_cache = {}
        self.chunk_cache = {}
        self.cache_lock = threading.Lock()
        self.headers_data = ''

        self.mempool_addresses = {}
        self.mempool_hist = {}
        self.mempool_hashes = []
        self.mempool_lock = threading.Lock()

        self.address_queue = Queue()
        self.dbpath = config.get('leveldb', 'path')

        self.dblock = threading.Lock()
        try:
            self.db = leveldb.LevelDB(self.dbpath)
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (
            config.get('bitcoind','user'),
            config.get('bitcoind','password'),
            config.get('bitcoind','host'),
            config.get('bitcoind','port'))

        self.height = 0
        self.is_test = False
        self.sent_height = 0
        self.sent_header = None


        try:
            hist = self.deserialize(self.db.Get('height'))
            self.last_hash, self.height, _ = hist[0]
            print_log( "hist", hist )
        except:
            #traceback.print_exc(file=sys.stdout)
            print_log('initializing database')
            self.height = 0
            self.last_hash = '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'

        # catch_up headers
        self.init_headers(self.height)

        threading.Timer(0, lambda: self.catch_up(sync=False)).start()
        while not shared.stopped() and not self.up_to_date:
            try:
                time.sleep(1)
            except:
                print "keyboard interrupt: stopping threads"
                shared.stop()
                sys.exit(0)

        print_log( "blockchain is up to date." )

        threading.Timer(10, self.main_iteration).start()


    def bitcoind(self, method, params=[]):
        postdata = dumps({"method": method, 'params': params, 'id':'jsonrpc'})
        try:
            respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        r = loads(respdata)
        if r['error'] != None:
            raise BaseException(r['error'])
        return r.get('result')


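    # On-disk history format (used by serialize/deserialize below): each
    # address maps to a byte string made of 40-byte records,
    #   <txid (32 bytes)> <txpos (4 bytes, little-endian)> <height (4 bytes, little-endian)>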
    def serialize(self, h):
        s = ''
        for txid, txpos, height in h:
            s += txid + int_to_hex(txpos, 4) + int_to_hex(height, 4)
        return s.decode('hex')


    def deserialize(self, s):
        h = []
        while s:
            txid = s[0:32].encode('hex')
            txpos = int( rev_hex( s[32:36].encode('hex') ), 16 )
            height = int( rev_hex( s[36:40].encode('hex') ), 16 )
            h.append( ( txid, txpos, height ) )
            s = s[40:]
        return h


    def block2header(self, b):
        return {"block_height":b.get('height'), "version":b.get('version'), "prev_block_hash":b.get('previousblockhash'),
                "merkle_root":b.get('merkleroot'), "timestamp":b.get('time'), "bits":int(b.get('bits'),16), "nonce":b.get('nonce')}


    def get_header(self, height):
        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        return self.block2header(b)


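    # The headers file ('blockchain_headers' in the database directory) is a
    # flat file of raw 80-byte block headers indexed by height; init_headers
    # extends it from bitcoind until it reaches the height stored in the db.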
    def init_headers(self, db_height):
        self.chunk_cache = {}
        self.headers_filename = os.path.join( self.dbpath, 'blockchain_headers')

        if os.path.exists(self.headers_filename):
            height = os.path.getsize(self.headers_filename)/80 - 1   # the current height
            if height > 0:
                prev_hash = self.hash_header(self.read_header(height))
            else:
                prev_hash = None
        else:
            open(self.headers_filename,'wb').close()
            prev_hash = None
            height = -1

        if height < db_height:
            print_log( "catching up missing headers:", height, db_height)

        try:
            while height < db_height:
                height = height + 1
                header = self.get_header(height)
                if height>1:
                    assert prev_hash == header.get('prev_block_hash')
                self.write_header(header, sync=False)
                prev_hash = self.hash_header(header)
                if height%1000==0: print_log("headers file:",height)
        except KeyboardInterrupt:
            self.flush_headers()
            sys.exit()

        self.flush_headers()


    def hash_header(self, header):
        return rev_hex(Hash(header_to_string(header).decode('hex')).encode('hex'))


    def read_header(self, block_height):
        if os.path.exists(self.headers_filename):
            f = open(self.headers_filename,'rb')
            f.seek(block_height*80)
            h = f.read(80)
            f.close()
            if len(h) == 80:
                h = header_from_string(h)
                return h


    def read_chunk(self, index):
        f = open(self.headers_filename,'rb')
        f.seek(index*2016*80)
        chunk = f.read(2016*80)
        f.close()
        return chunk.encode('hex')


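    # write_header appends to an in-memory buffer (self.headers_data) that is
    # flushed to the headers file by flush_headers; the cached copy of the
    # affected 2016-header chunk is invalidated.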
    def write_header(self, header, sync=True):
        if not self.headers_data:
            self.headers_offset = header.get('block_height')

        self.headers_data += header_to_string(header).decode('hex')
        if sync or len(self.headers_data) > 40*100:
            self.flush_headers()

        with self.cache_lock:
            chunk_index = header.get('block_height')/2016
            if self.chunk_cache.get(chunk_index):
                self.chunk_cache.pop(chunk_index)

    def pop_header(self):
        # we need to do this only if we have not flushed
        if self.headers_data:
            # a serialized header is 80 bytes, so drop the full last header
            self.headers_data = self.headers_data[:-80]

    def flush_headers(self):
        if not self.headers_data: return
        f = open(self.headers_filename,'rb+')
        f.seek(self.headers_offset*80)
        f.write(self.headers_data)
        f.close()
        self.headers_data = ''


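    # A "chunk" is 2016 consecutive headers (one difficulty retarget period),
    # returned hex-encoded as expected by Electrum clients; the most recently
    # requested chunk is kept in memory.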
    def get_chunk(self, i):
        # store them on disk; store the current chunk in memory
        with self.cache_lock:
            chunk = self.chunk_cache.get(i)
            if not chunk:
                chunk = self.read_chunk(i)
                self.chunk_cache[i] = chunk

        return chunk


    def get_mempool_transaction(self, txid):
        try:
            raw_tx = self.bitcoind('getrawtransaction', [txid, 0, -1])
        except:
            return None

        vds = deserialize.BCDataStream()
        vds.write(raw_tx.decode('hex'))
        out = deserialize.parse_Transaction(vds, is_coinbase = False)
        return out


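    # get_history merges the confirmed history stored in LevelDB with
    # unconfirmed mempool transactions (reported with height 0) and caches the
    # result per address; cache_only=True returns -1 instead of hitting the db.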
    def get_history(self, addr, cache_only=False):
        with self.cache_lock: hist = self.history_cache.get( addr )
        if hist is not None: return hist
        if cache_only: return -1

        with self.dblock:
            try:
                hash_160 = bc_address_to_hash_160(addr)
                hist = self.deserialize(self.db.Get(hash_160))
                is_known = True
            except:
                hist = []
                is_known = False

        # should not be necessary
        hist.sort( key=lambda tup: tup[1])
        # check uniqueness too...

        # add memory pool
        with self.mempool_lock:
            for txid in self.mempool_hist.get(addr,[]):
                hist.append((txid, 0, 0))

        hist = map(lambda x: {'tx_hash':x[0], 'height':x[2]}, hist)
        # add something to distinguish between unused and empty addresses
        if hist == [] and is_known: hist = ['*']

        with self.cache_lock: self.history_cache[addr] = hist
        return hist


    def get_status(self, addr, cache_only=False):
        tx_points = self.get_history(addr, cache_only)
        if cache_only and tx_points == -1: return -1

        if not tx_points: return None
        if tx_points == ['*']: return '*'
        status = ''
        for tx in tx_points:
            status += tx.get('tx_hash') + ':%d:' % tx.get('height')
        return hashlib.sha256( status ).digest().encode('hex')


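    # get_merkle rebuilds the block's merkle tree from bitcoind's transaction
    # list and collects the branch (sibling hashes) needed to verify tx_hash.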
    def get_merkle(self, tx_hash, height):

        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        tx_list = b.get('tx')
        tx_pos = tx_list.index(tx_hash)

        merkle = map(hash_decode, tx_list)
        target_hash = hash_decode(tx_hash)
        s = []
        while len(merkle) != 1:
            if len(merkle)%2: merkle.append( merkle[-1] )
            n = []
            while merkle:
                new_hash = Hash( merkle[0] + merkle[1] )
                if merkle[0] == target_hash:
                    s.append( hash_encode( merkle[1]))
                    target_hash = new_hash
                elif merkle[1] == target_hash:
                    s.append( hash_encode( merkle[0]))
                    target_hash = new_hash
                n.append( new_hash )
                merkle = merkle[2:]
            merkle = n

        return {"block_height":height, "merkle":s, "pos":tx_pos}


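    # During block import, self.batch_list maps hash160(address) -> serialized
    # history and self.batch_txio maps a 36-byte outpoint (txid + output index)
    # back to the receiving address.  add_to_history inserts a record keeping
    # the history sorted by height; remove_from_history deletes the record for
    # a spent outpoint and returns its height and address.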
    def add_to_history(self, addr, tx_hash, tx_pos, tx_height):

        # keep it sorted
        s = (tx_hash + int_to_hex(tx_pos, 4) + int_to_hex(tx_height, 4)).decode('hex')

        serialized_hist = self.batch_list[addr]

        l = len(serialized_hist)/40
        for i in range(l-1, -1, -1):
            item = serialized_hist[40*i:40*(i+1)]
            item_height = int( rev_hex( item[36:40].encode('hex') ), 16 )
            if item_height < tx_height:
                serialized_hist = serialized_hist[0:40*(i+1)] + s + serialized_hist[40*(i+1):]
                break
        else:
            serialized_hist = s + serialized_hist

        self.batch_list[addr] = serialized_hist

        # backlink
        txo = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')
        self.batch_txio[txo] = addr


    def remove_from_history(self, addr, tx_hash, tx_pos):

        txi = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')

        if addr is None:
            try:
                addr = self.batch_txio[txi]
            except:
                raise BaseException(tx_hash, tx_pos)

        serialized_hist = self.batch_list[addr]

        l = len(serialized_hist)/40
        for i in range(l):
            item = serialized_hist[40*i:40*(i+1)]
            if item[0:36] == txi:
                height = int( rev_hex( item[36:40].encode('hex') ), 16 )
                serialized_hist = serialized_hist[0:40*i] + serialized_hist[40*(i+1):]
                break
        else:
            hist = self.deserialize(serialized_hist)
            raise BaseException("prevout not found", addr, hist, tx_hash, tx_pos)

        self.batch_list[addr] = serialized_hist
        return height, addr


    def deserialize_block(self, block):
        txlist = block.get('tx')
        tx_hashes = []  # ordered txids
        txdict = {}     # deserialized tx
        is_coinbase = True
        for raw_tx in txlist:
            tx_hash = hash_encode(Hash(raw_tx.decode('hex')))
            tx_hashes.append(tx_hash)
            vds = deserialize.BCDataStream()
            vds.write(raw_tx.decode('hex'))
            tx = deserialize.parse_Transaction(vds, is_coinbase)
            txdict[tx_hash] = tx
            is_coinbase = False
        return tx_hashes, txdict

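    # Undo information (the history entries removed when a block spends
    # prevouts) is stored under rotating keys "undo<height % 100>", i.e. only
    # for blocks within 100 of the bitcoind tip, serialized with repr()/eval().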
    def get_undo_info(self, height):
        s = self.db.Get("undo%d"%(height%100))
        return eval(s)

    def write_undo_info(self, batch, height, undo_info):
        if self.is_test or height > self.bitcoind_height - 100:
            batch.Put("undo%d"%(height%100), repr(undo_info))


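    # import_block: deserialize the block's transactions, prefetch the
    # addresses of their inputs and the histories of all touched addresses,
    # then apply (or, with revert=True, undo) each transaction in order and
    # commit everything in a single LevelDB write batch.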
    def import_block(self, block, block_hash, block_height, sync, revert=False):

        self.batch_list = {}  # address -> history
        self.batch_txio = {}  # transaction i/o -> address

        block_inputs = []
        block_outputs = []
        addr_to_read = []

        # deserialize transactions
        t0 = time.time()
        tx_hashes, txdict = self.deserialize_block(block)

        t00 = time.time()


        if not revert:
            # read addresses of tx inputs
            for tx in txdict.values():
                for x in tx.get('inputs'):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    block_inputs.append(txi)

            block_inputs.sort()
            for txi in block_inputs:
                try:
                    addr = self.db.Get(txi)
                except:
                    # the input could come from the same block
                    continue
                self.batch_txio[txi] = addr
                addr_to_read.append(addr)

        else:
            for txid, tx in txdict.items():
                for x in tx.get('outputs'):
                    txo = (txid + int_to_hex(x.get('index'), 4)).decode('hex')
                    block_outputs.append(txo)

        # read histories of addresses
        for txid, tx in txdict.items():
            for x in tx.get('outputs'):
                hash_160 = bc_address_to_hash_160(x.get('address'))
                addr_to_read.append(hash_160)

        addr_to_read.sort()
        for addr in addr_to_read:
            try:
                self.batch_list[addr] = self.db.Get(addr)
            except:
                self.batch_list[addr] = ''


        if revert:
            undo_info = self.get_undo_info(block_height)
            # print "undo", block_height, undo_info
        else: undo_info = {}

        # process
        t1 = time.time()

        if revert: tx_hashes = tx_hashes[::-1]
        for txid in tx_hashes: # must be ordered
            tx = txdict[txid]
            if not revert:

                undo = []
                for x in tx.get('inputs'):
                    prevout_height, prevout_addr = self.remove_from_history( None, x.get('prevout_hash'), x.get('prevout_n'))
                    undo.append( (prevout_height, prevout_addr) )
                undo_info[txid] = undo

                for x in tx.get('outputs'):
                    hash_160 = bc_address_to_hash_160(x.get('address'))
                    self.add_to_history( hash_160, txid, x.get('index'), block_height)

            else:
                for x in tx.get('outputs'):
                    hash_160 = bc_address_to_hash_160(x.get('address'))
                    self.remove_from_history( hash_160, txid, x.get('index'))

                i = 0
                for x in tx.get('inputs'):
                    prevout_height, prevout_addr = undo_info.get(txid)[i]
                    i += 1

                    # read the history into batch list
                    if self.batch_list.get(prevout_addr) is None:
                        self.batch_list[prevout_addr] = self.db.Get(prevout_addr)

                    # re-add them to the history
                    self.add_to_history( prevout_addr, x.get('prevout_hash'), x.get('prevout_n'), prevout_height)
                    # print_log( "new hist for", hash_160_to_bc_address(prevout_addr), self.deserialize(self.batch_list[prevout_addr]) )

        # write
        max_len = 0
        max_addr = ''
        t2 = time.time()

        batch = leveldb.WriteBatch()
        for addr, serialized_hist in self.batch_list.items():
            batch.Put(addr, serialized_hist)
            l = len(serialized_hist)
            if l > max_len:
                max_len = l
                max_addr = addr

        if not revert:
            # add new created outputs
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete spent inputs
            for txi in block_inputs:
                batch.Delete(txi)
            # add undo info
            self.write_undo_info(batch, block_height, undo_info)
        else:
            # restore spent inputs
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete spent outputs
            for txo in block_outputs:
                batch.Delete(txo)


        # store the new tip (block hash and height)
        batch.Put('height', self.serialize( [(block_hash, block_height, 0)] ) )

        # actual write
        self.db.Write(batch, sync = sync)

        t3 = time.time()
        if t3 - t0 > 10 and not sync:
            print_log("block", block_height,
                      "parse:%0.2f "%(t00 - t0),
                      "read:%0.2f "%(t1 - t00),
                      "proc:%.2f "%(t2-t1),
                      "write:%.2f "%(t3-t2),
                      "max:", max_len, hash_160_to_bc_address(max_addr))

        for h160 in self.batch_list.keys():
            addr = hash_160_to_bc_address(h160)
            self.invalidate_cache(addr)



    def add_request(self, request):
        # see if we can get it from the cache; if not, add it to the queue
        if self.process( request, cache_only = True) == -1:
            self.queue.put(request)


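    # process() dispatches Electrum JSON-RPC requests.  With cache_only=True it
    # only answers requests that can be served from the caches and returns -1
    # otherwise, so add_request can defer the rest to the request queue.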
    def process(self, request, cache_only = False):
        #print "abe process", request

        message_id = request['id']
        method = request['method']
        params = request.get('params',[])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.height

        elif method == 'blockchain.headers.subscribe':
            result = self.header

        elif method == 'blockchain.address.subscribe':
            try:
                address = params[0]
                result = self.get_status(address, cache_only)
                self.watch_address(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log( "error:", error )

        elif method == 'blockchain.address.unsubscribe':
            try:
                password = params[0]
                address = params[1]
                if password == self.config.get('server','password'):
                    self.watched_addresses.remove(address)
                    print_log('unsubscribed', address)
                    result = "ok"
                else:
                    print_log('incorrect password')
                    result = "authentication error"
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log( "error:", error )

        elif method == 'blockchain.address.get_history':
            try:
                address = params[0]
                result = self.get_history( address, cache_only )
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log( "error:", error )

        elif method == 'blockchain.block.get_header':
            if cache_only:
                result = -1
            else:
                try:
                    height = params[0]
                    result = self.get_header( height )
                except BaseException, e:
                    error = str(e) + ': %d'% height
                    print_log( "error:", error )

        elif method == 'blockchain.block.get_chunk':
            if cache_only:
                result = -1
            else:
                try:
                    index = params[0]
                    result = self.get_chunk( index )
                except BaseException, e:
                    error = str(e) + ': %d'% index
                    print_log( "error:", error)

        elif method == 'blockchain.transaction.broadcast':
            try:
                txo = self.bitcoind('sendrawtransaction', params)
                print_log( "sent tx:", txo )
                result = txo
            except BaseException, e:
                result = str(e) # do not send an error
                print_log( "error:", str(e), params )

        elif method == 'blockchain.transaction.get_merkle':
            if cache_only:
                result = -1
            else:
                try:
                    tx_hash = params[0]
                    tx_height = params[1]
                    result = self.get_merkle(tx_hash, tx_height)
                except BaseException, e:
                    error = str(e) + ': ' + tx_hash
                    print_log( "error:", error )

        elif method == 'blockchain.transaction.get':
            try:
                tx_hash = params[0]
                height = params[1]
                result = self.bitcoind('getrawtransaction', [tx_hash, 0, height] )
            except BaseException, e:
                error = str(e) + ': ' + tx_hash
                print_log( "error:", error )

        else:
            error = "unknown method:%s"%method

        if cache_only and result == -1: return -1

        if error:
            response = { 'id':message_id, 'error':error }
            self.push_response(response)
        elif result != '':
            response = { 'id':message_id, 'result':result }
            self.push_response(response)


    def watch_address(self, addr):
        if addr not in self.watched_addresses:
            self.watched_addresses.append(addr)


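    # catch_up: compare the local tip with bitcoind's best block; import the
    # next block if it extends our chain, otherwise revert the current tip
    # (blockchain reorg) and try again.  Loops until the hashes match.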
    def catch_up(self, sync = True):

        t1 = time.time()

        while not self.shared.stopped():

            # are we done yet?
            info = self.bitcoind('getinfo')
            self.bitcoind_height = info.get('blocks')
            bitcoind_block_hash = self.bitcoind('getblockhash', [self.bitcoind_height])
            if self.last_hash == bitcoind_block_hash:
                self.up_to_date = True
                break

            # not done..
            self.up_to_date = False
            next_block_hash = self.bitcoind('getblockhash', [self.height+1])
            next_block = self.bitcoind('getblock', [next_block_hash, 1])

            # fixme: this is unsafe, if we revert when the undo info is not yet written
            revert = (random.randint(1, 100)==1) if self.is_test else False

            if (next_block.get('previousblockhash') == self.last_hash) and not revert:

                self.import_block(next_block, next_block_hash, self.height+1, sync)
                self.height = self.height + 1
                self.write_header(self.block2header(next_block), sync)
                self.last_hash = next_block_hash

                if (self.height)%100 == 0 and not sync:
                    t2 = time.time()
                    print_log( "catch_up: block %d (%.3fs)"%( self.height, t2 - t1 ) )
                    t1 = t2

            else:
                # revert current block
                block = self.bitcoind('getblock', [self.last_hash, 1])
                print_log( "blockchain reorg", self.height, block.get('previousblockhash'), self.last_hash )
                self.import_block(block, self.last_hash, self.height, sync, revert=True)
                self.pop_header()
                self.flush_headers()

                self.height = self.height - 1

                # read previous header from disk
                self.header = self.read_header(self.height)
                self.last_hash = self.hash_header(self.header)


        self.header = self.block2header(self.bitcoind('getblock', [self.last_hash]))


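    # memorypool_update: for each new mempool transaction, record the addresses
    # funding and receiving it (mempool_addresses), rebuild the per-address
    # unconfirmed history (mempool_hist), and invalidate the history cache of
    # every address whose unconfirmed history changed.  A transaction that
    # disappears from the mempool while being fetched is simply skipped
    # (get_mempool_transaction returns None).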
    def memorypool_update(self):

        mempool_hashes = self.bitcoind('getrawmempool')

        for tx_hash in mempool_hashes:
            if tx_hash in self.mempool_hashes: continue

            tx = self.get_mempool_transaction(tx_hash)
            if not tx: continue

            for x in tx.get('inputs'):
                txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                try:
                    h160 = self.db.Get(txi)
                    addr = hash_160_to_bc_address(h160)
                except:
                    continue
                l = self.mempool_addresses.get(tx_hash, [])
                if addr not in l:
                    l.append( addr )
                    self.mempool_addresses[tx_hash] = l

            for x in tx.get('outputs'):
                addr = x.get('address')
                l = self.mempool_addresses.get(tx_hash, [])
                if addr not in l:
                    l.append( addr )
                    self.mempool_addresses[tx_hash] = l

            self.mempool_hashes.append(tx_hash)

        # remove older entries from mempool_hashes
        self.mempool_hashes = mempool_hashes

        # remove deprecated entries from mempool_addresses
        for tx_hash, addresses in self.mempool_addresses.items():
            if tx_hash not in self.mempool_hashes:
                self.mempool_addresses.pop(tx_hash)

        # rebuild histories
        new_mempool_hist = {}
        for tx_hash, addresses in self.mempool_addresses.items():
            for addr in addresses:
                h = new_mempool_hist.get(addr, [])
                if tx_hash not in h:
                    h.append( tx_hash )
                new_mempool_hist[addr] = h

        for addr in new_mempool_hist.keys():
            if addr in self.mempool_hist.keys():
                if self.mempool_hist[addr] != new_mempool_hist[addr]:
                    self.invalidate_cache(addr)
            else:
                self.invalidate_cache(addr)

        with self.mempool_lock:
            self.mempool_hist = new_mempool_hist


    def invalidate_cache(self, address):
        with self.cache_lock:
            if address in self.history_cache:
                print_log( "cache: invalidating", address )
                self.history_cache.pop(address)

        if address in self.watched_addresses:
            self.address_queue.put(address)


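    # main_iteration runs every 10 seconds: catch up with bitcoind, refresh the
    # mempool, then push blockchain.numblocks/headers notifications and address
    # status updates for watched addresses whose history changed.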
    def main_iteration(self):

        if self.shared.stopped():
            print_log( "blockchain processor terminating" )
            return

        with self.dblock:
            t1 = time.time()
            self.catch_up()
            t2 = time.time()

        self.memorypool_update()
        t3 = time.time()
        # print "mempool:", len(self.mempool_addresses), len(self.mempool_hist), "%.3fs"%(t3 - t2)


        if self.sent_height != self.height:
            self.sent_height = self.height
            self.push_response({ 'id': None, 'method':'blockchain.numblocks.subscribe', 'params':[self.height] })

        if self.sent_header != self.header:
            print_log( "blockchain: %d (%.3fs)"%( self.height, t2 - t1 ) )
            self.sent_header = self.header
            self.push_response({ 'id': None, 'method':'blockchain.headers.subscribe', 'params':[self.header] })

        while True:
            try:
                addr = self.address_queue.get(False)
            except:
                break
            if addr in self.watched_addresses:
                status = self.get_status( addr )
                self.push_response({ 'id': None, 'method':'blockchain.address.subscribe', 'params':[addr, status] })

        if not self.shared.stopped():
            threading.Timer(10, self.main_iteration).start()
        else:
            print_log( "blockchain processor terminating" )