fix caching problem with chunks
[electrum-server.git] / backends / bitcoind / blockchain_processor.py
from json import dumps, loads
import leveldb, urllib
import deserialize
import ast, time, threading, hashlib
from Queue import Queue
import traceback, sys, os, random


from util import Hash, hash_encode, hash_decode, rev_hex, int_to_hex
from util import bc_address_to_hash_160, hash_160_to_bc_address, header_to_string, header_from_string
from processor import Processor, print_log


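# BlockchainProcessor maintains the address index of the blockchain in
# LevelDB, keeps a flat file of block headers on disk, and answers the
# 'blockchain.*' requests of the Electrum protocol.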
class BlockchainProcessor(Processor):

    def __init__(self, config, shared):
        Processor.__init__(self)

        self.shared = shared
        self.config = config
        self.up_to_date = False
        self.watched_addresses = []
        self.history_cache = {}
        self.chunk_cache = {}
        self.cache_lock = threading.Lock()
        self.headers_data = ''

        self.mempool_addresses = {}
        self.mempool_hist = {}
        self.mempool_hashes = []
        self.mempool_lock = threading.Lock()

        self.address_queue = Queue()
        self.dbpath = config.get('leveldb', 'path')

        self.dblock = threading.Lock()
        try:
            self.db = leveldb.LevelDB(self.dbpath)
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (
            config.get('bitcoind', 'user'),
            config.get('bitcoind', 'password'),
            config.get('bitcoind', 'host'),
            config.get('bitcoind', 'port'))

        self.height = 0
        self.is_test = False
        self.sent_height = 0
        self.sent_header = None

        try:
            hist = self.deserialize(self.db.Get('height'))
            self.last_hash, self.height, _ = hist[0]
            print_log("hist", hist)
        except:
            #traceback.print_exc(file=sys.stdout)
            print_log('initializing database')
            self.height = 0
            self.last_hash = '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'

        # catch_up headers
        self.init_headers(self.height)

        threading.Timer(0, lambda: self.catch_up(sync=False)).start()
        while not shared.stopped() and not self.up_to_date:
            try:
                time.sleep(1)
            except:
                print_log("keyboard interrupt: stopping threads")
                shared.stop()
                sys.exit(0)

        print_log("blockchain is up to date.")

        threading.Timer(10, self.main_iteration).start()


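    # JSON-RPC call to bitcoind; raises if the daemon returns an error.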
    def bitcoind(self, method, params=[]):
        postdata = dumps({"method": method, 'params': params, 'id': 'jsonrpc'})
        respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] is not None:
            raise BaseException(r['error'])
        return r.get('result')


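    # An address history is stored as a flat string of 40-byte records:
    # 32-byte txid + 4-byte little-endian tx position + 4-byte
    # little-endian block height.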
    def serialize(self, h):
        s = ''
        for txid, txpos, height in h:
            s += txid + int_to_hex(txpos, 4) + int_to_hex(height, 4)
        return s.decode('hex')


    def deserialize(self, s):
        h = []
        while s:
            txid = s[0:32].encode('hex')
            txpos = int(rev_hex(s[32:36].encode('hex')), 16)
            height = int(rev_hex(s[36:40].encode('hex')), 16)
            h.append((txid, txpos, height))
            s = s[40:]
        return h


    def block2header(self, b):
        return {"block_height": b.get('height'), "version": b.get('version'), "prev_block_hash": b.get('previousblockhash'),
                "merkle_root": b.get('merkleroot'), "timestamp": b.get('time'), "bits": int(b.get('bits'), 16), "nonce": b.get('nonce')}


    def get_header(self, height):
        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        return self.block2header(b)


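    # The headers file is the concatenation of all 80-byte binary block
    # headers, so the header at height h lives at byte offset h*80. The
    # chunk cache is derived from this file, so it must be reset here.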
    def init_headers(self, db_height):
        self.chunk_cache = {}
        self.headers_filename = os.path.join(self.dbpath, 'blockchain_headers')

        if os.path.exists(self.headers_filename):
            height = os.path.getsize(self.headers_filename)/80 - 1  # the current height
            if height > 0:
                prev_hash = self.hash_header(self.read_header(height))
            else:
                prev_hash = None
        else:
            open(self.headers_filename, 'wb').close()
            prev_hash = None
            height = -1

        if height < db_height:
            print_log("catching up missing headers:", height, db_height)

        try:
            while height < db_height:
                height = height + 1
                header = self.get_header(height)
                if height > 1:
                    assert prev_hash == header.get('prev_block_hash')
                self.write_header(header, sync=False)
                prev_hash = self.hash_header(header)
                if height % 1000 == 0:
                    print_log("headers file:", height)
        except KeyboardInterrupt:
            self.flush_headers()
            sys.exit()

        self.flush_headers()


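    # A block hash is the double-SHA256 of the 80-byte header, rendered
    # in the usual byte-reversed hex form.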
    def hash_header(self, header):
        return rev_hex(Hash(header_to_string(header).decode('hex')).encode('hex'))


    def read_header(self, block_height):
        if os.path.exists(self.headers_filename):
            f = open(self.headers_filename, 'rb')
            f.seek(block_height*80)
            h = f.read(80)
            f.close()
            if len(h) == 80:
                h = header_from_string(h)
                return h


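    # A chunk is one difficulty-retarget period: 2016 consecutive
    # headers (2016*80 bytes), returned hex-encoded as the Electrum
    # protocol expects.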
    def read_chunk(self, index):
        f = open(self.headers_filename, 'rb')
        f.seek(index*2016*80)
        chunk = f.read(2016*80)
        f.close()
        return chunk.encode('hex')


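    # Headers are buffered in memory and flushed in batches. Writing a
    # header makes any cached chunk covering its height stale, so that
    # cache entry is dropped here (the "caching problem with chunks"
    # this commit fixes).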
    def write_header(self, header, sync=True):
        if not self.headers_data:
            self.headers_offset = header.get('block_height')

        self.headers_data += header_to_string(header).decode('hex')
        if sync or len(self.headers_data) > 40*100:
            self.flush_headers()

        with self.cache_lock:
            chunk_index = header.get('block_height')/2016
            if self.chunk_cache.get(chunk_index):
                self.chunk_cache.pop(chunk_index)

    def pop_header(self):
        # we need to do this only if we have not flushed;
        # a serialized header is 80 bytes, so drop the last 80
        if self.headers_data:
            self.headers_data = self.headers_data[:-80]

    def flush_headers(self):
        if not self.headers_data:
            return
        f = open(self.headers_filename, 'rb+')
        f.seek(self.headers_offset*80)
        f.write(self.headers_data)
        f.close()
        self.headers_data = ''


    def get_chunk(self, i):
        # chunks live on disk; keep the requested chunk cached in memory
        with self.cache_lock:
            chunk = self.chunk_cache.get(i)
            if not chunk:
                chunk = self.read_chunk(i)
                self.chunk_cache[i] = chunk

        return chunk


    def get_transaction(self, txid, block_height=-1, is_coinbase=False):
        raw_tx = self.bitcoind('getrawtransaction', [txid, 0, block_height])
        vds = deserialize.BCDataStream()
        vds.write(raw_tx.decode('hex'))
        out = deserialize.parse_Transaction(vds, is_coinbase)
        return out


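    # Returns the address history: confirmed txs from LevelDB plus
    # unconfirmed mempool txs (reported at height 0). Results are cached
    # per address; with cache_only=True a cache miss returns the
    # sentinel -1 instead of hitting the database.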
    def get_history(self, addr, cache_only=False):
        with self.cache_lock:
            hist = self.history_cache.get(addr)
        if hist is not None:
            return hist
        if cache_only:
            return -1

        with self.dblock:
            try:
                hash_160 = bc_address_to_hash_160(addr)
                hist = self.deserialize(self.db.Get(hash_160))
                is_known = True
            except:
                hist = []
                is_known = False

        # should not be necessary
        hist.sort(key=lambda tup: tup[1])
        # check uniqueness too...

        # add memory pool
        with self.mempool_lock:
            for txid in self.mempool_hist.get(addr, []):
                hist.append((txid, 0, 0))

        hist = map(lambda x: {'tx_hash': x[0], 'height': x[2]}, hist)
        # add something to distinguish between unused and empty addresses
        if hist == [] and is_known:
            hist = ['*']

        with self.cache_lock:
            self.history_cache[addr] = hist
        return hist


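    # The Electrum address status is the SHA-256 hex digest of the
    # concatenated "txid:height:" strings of the history ('*' for a
    # known-but-empty address, None for an unused one).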
    def get_status(self, addr, cache_only=False):
        tx_points = self.get_history(addr, cache_only)
        if cache_only and tx_points == -1:
            return -1

        if not tx_points:
            return None
        if tx_points == ['*']:
            return '*'
        status = ''
        for tx in tx_points:
            status += tx.get('tx_hash') + ':%d:' % tx.get('height')
        return hashlib.sha256(status).digest().encode('hex')


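    # Rebuilds the block's Merkle tree level by level, collecting the
    # sibling hash of the target transaction at each level; this is the
    # proof an SPV client checks against the header's merkle_root.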
    def get_merkle(self, tx_hash, height):

        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        tx_list = b.get('tx')
        tx_pos = tx_list.index(tx_hash)

        merkle = map(hash_decode, tx_list)
        target_hash = hash_decode(tx_hash)
        s = []
        while len(merkle) != 1:
            if len(merkle) % 2:
                merkle.append(merkle[-1])
            n = []
            while merkle:
                new_hash = Hash(merkle[0] + merkle[1])
                if merkle[0] == target_hash:
                    s.append(hash_encode(merkle[1]))
                    target_hash = new_hash
                elif merkle[1] == target_hash:
                    s.append(hash_encode(merkle[0]))
                    target_hash = new_hash
                n.append(new_hash)
                merkle = merkle[2:]
            merkle = n

        return {"block_height": height, "merkle": s, "pos": tx_pos}


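    # Inserts one 40-byte history record for addr, walking backwards to
    # keep the serialized history sorted by height, and records the
    # output -> address backlink used to resolve future spends.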
    def add_to_history(self, addr, tx_hash, tx_pos, tx_height):

        # keep it sorted
        s = (tx_hash + int_to_hex(tx_pos, 4) + int_to_hex(tx_height, 4)).decode('hex')

        serialized_hist = self.batch_list[addr]

        l = len(serialized_hist)/40
        for i in range(l-1, -1, -1):
            item = serialized_hist[40*i:40*(i+1)]
            item_height = int(rev_hex(item[36:40].encode('hex')), 16)
            if item_height < tx_height:
                serialized_hist = serialized_hist[0:40*(i+1)] + s + serialized_hist[40*(i+1):]
                break
        else:
            serialized_hist = s + serialized_hist

        self.batch_list[addr] = serialized_hist

        # backlink
        txo = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')
        self.batch_txio[txo] = addr


    def remove_from_history(self, addr, tx_hash, tx_pos):

        txi = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')

        if addr is None:
            try:
                addr = self.batch_txio[txi]
            except:
                raise BaseException(tx_hash, tx_pos)

        serialized_hist = self.batch_list[addr]

        l = len(serialized_hist)/40
        for i in range(l):
            item = serialized_hist[40*i:40*(i+1)]
            if item[0:36] == txi:
                height = int(rev_hex(item[36:40].encode('hex')), 16)
                serialized_hist = serialized_hist[0:40*i] + serialized_hist[40*(i+1):]
                break
        else:
            hist = self.deserialize(serialized_hist)
            raise BaseException("prevout not found", addr, hist, tx_hash, tx_pos)

        self.batch_list[addr] = serialized_hist
        return height, addr


    def deserialize_block(self, block):
        txlist = block.get('tx')
        tx_hashes = []  # ordered txids
        txdict = {}     # deserialized tx
        is_coinbase = True
        for raw_tx in txlist:
            tx_hash = hash_encode(Hash(raw_tx.decode('hex')))
            tx_hashes.append(tx_hash)
            vds = deserialize.BCDataStream()
            vds.write(raw_tx.decode('hex'))
            tx = deserialize.parse_Transaction(vds, is_coinbase)
            txdict[tx_hash] = tx
            is_coinbase = False
        return tx_hashes, txdict

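    # Undo information is kept in a ring buffer of 100 LevelDB keys
    # ("undo0".."undo99"), enough to revert the last 100 blocks.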
    def get_undo_info(self, height):
        s = self.db.Get("undo%d" % (height % 100))
        # stored with repr(); parse it back without evaluating arbitrary code
        return ast.literal_eval(s)

    def write_undo_info(self, batch, height, undo_info):
        if self.is_test or height > self.bitcoind_height - 100:
            batch.Put("undo%d" % (height % 100), repr(undo_info))


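    # Applies (or, with revert=True, rolls back) one block: deserialize
    # all transactions, prefetch the affected histories from LevelDB,
    # update them in block order, then commit everything in a single
    # batch and invalidate the per-address caches.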
    def import_block(self, block, block_hash, block_height, sync, revert=False):

        self.batch_list = {}  # address -> history
        self.batch_txio = {}  # transaction i/o -> address

        block_inputs = []
        block_outputs = []
        addr_to_read = []

        # deserialize transactions
        t0 = time.time()
        tx_hashes, txdict = self.deserialize_block(block)

        t00 = time.time()

        if not revert:
            # read addresses of tx inputs
            for tx in txdict.values():
                for x in tx.get('inputs'):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    block_inputs.append(txi)

            block_inputs.sort()
            for txi in block_inputs:
                try:
                    addr = self.db.Get(txi)
                except:
                    # the input could come from the same block
                    continue
                self.batch_txio[txi] = addr
                addr_to_read.append(addr)

        else:
            for txid, tx in txdict.items():
                for x in tx.get('outputs'):
                    txo = (txid + int_to_hex(x.get('index'), 4)).decode('hex')
                    block_outputs.append(txo)

        # read histories of addresses
        for txid, tx in txdict.items():
            for x in tx.get('outputs'):
                hash_160 = bc_address_to_hash_160(x.get('address'))
                addr_to_read.append(hash_160)

        addr_to_read.sort()
        for addr in addr_to_read:
            try:
                self.batch_list[addr] = self.db.Get(addr)
            except:
                self.batch_list[addr] = ''

        if revert:
            undo_info = self.get_undo_info(block_height)
            # print "undo", block_height, undo_info
        else:
            undo_info = {}

        # process
        t1 = time.time()

        if revert:
            tx_hashes = tx_hashes[::-1]
        for txid in tx_hashes:  # must be ordered
            tx = txdict[txid]
            if not revert:

                undo = []
                for x in tx.get('inputs'):
                    prevout_height, prevout_addr = self.remove_from_history(None, x.get('prevout_hash'), x.get('prevout_n'))
                    undo.append((prevout_height, prevout_addr))
                undo_info[txid] = undo

                for x in tx.get('outputs'):
                    hash_160 = bc_address_to_hash_160(x.get('address'))
                    self.add_to_history(hash_160, txid, x.get('index'), block_height)

            else:
                for x in tx.get('outputs'):
                    hash_160 = bc_address_to_hash_160(x.get('address'))
                    self.remove_from_history(hash_160, txid, x.get('index'))

                i = 0
                for x in tx.get('inputs'):
                    prevout_height, prevout_addr = undo_info.get(txid)[i]
                    i += 1

                    # read the history into batch list
                    if self.batch_list.get(prevout_addr) is None:
                        self.batch_list[prevout_addr] = self.db.Get(prevout_addr)

                    # re-add them to the history
                    self.add_to_history(prevout_addr, x.get('prevout_hash'), x.get('prevout_n'), prevout_height)
                    print_log("new hist for", hash_160_to_bc_address(prevout_addr), self.deserialize(self.batch_list[prevout_addr]))

        # write
        max_len = 0
        max_addr = ''
        t2 = time.time()

        batch = leveldb.WriteBatch()
        for addr, serialized_hist in self.batch_list.items():
            batch.Put(addr, serialized_hist)
            l = len(serialized_hist)
            if l > max_len:
                max_len = l
                max_addr = addr

        if not revert:
            # add new created outputs
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete spent inputs
            for txi in block_inputs:
                batch.Delete(txi)
            # add undo info
            self.write_undo_info(batch, block_height, undo_info)
        else:
            # restore spent inputs
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete spent outputs
            for txo in block_outputs:
                batch.Delete(txo)

        # add the max
        batch.Put('height', self.serialize([(block_hash, block_height, 0)]))

        # actual write
        self.db.Write(batch, sync=sync)

        t3 = time.time()
        if t3 - t0 > 10 and not sync:
            print_log("block", block_height,
                      "parse:%0.2f " % (t00 - t0),
                      "read:%0.2f " % (t1 - t00),
                      "proc:%.2f " % (t2 - t1),
                      "write:%.2f " % (t3 - t2),
                      "max:", max_len, hash_160_to_bc_address(max_addr))

        for h160 in self.batch_list.keys():
            addr = hash_160_to_bc_address(h160)
            self.invalidate_cache(addr)


    def add_request(self, request):
        # see if we can get it from the cache; if not, add it to the queue
        if self.process(request, cache_only=True) == -1:
            self.queue.put(request)


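    # Dispatches one Electrum protocol request. With cache_only=True it
    # only answers from the caches and returns -1 on a miss, so the
    # caller can defer the request to the processing queue.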
    def process(self, request, cache_only=False):
        #print "abe process", request

        message_id = request['id']
        method = request['method']
        params = request.get('params', [])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.height

        elif method == 'blockchain.headers.subscribe':
            result = self.header

        elif method == 'blockchain.address.subscribe':
            try:
                address = params[0]
                result = self.get_status(address, cache_only)
                self.watch_address(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.address.unsubscribe':
            try:
                password = params[0]
                address = params[1]
                if password == self.config.get('server', 'password'):
                    self.watched_addresses.remove(address)
                    print_log('unsubscribed', address)
                    result = "ok"
                else:
                    print_log('incorrect password')
                    result = "authentication error"
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.address.get_history':
            try:
                address = params[0]
                result = self.get_history(address, cache_only)
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log("error:", error)

        elif method == 'blockchain.block.get_header':
            if cache_only:
                result = -1
            else:
                try:
                    height = params[0]
                    result = self.get_header(height)
                except BaseException, e:
                    error = str(e) + ': %d' % height
                    print_log("error:", error)

        elif method == 'blockchain.block.get_chunk':
            if cache_only:
                result = -1
            else:
                try:
                    index = params[0]
                    result = self.get_chunk(index)
                except BaseException, e:
                    error = str(e) + ': %d' % index
                    print_log("error:", error)

        elif method == 'blockchain.transaction.broadcast':
            txo = self.bitcoind('sendrawtransaction', params)
            print_log("sent tx:", txo)
            result = txo

        elif method == 'blockchain.transaction.get_merkle':
            if cache_only:
                result = -1
            else:
                try:
                    tx_hash = params[0]
                    tx_height = params[1]
                    result = self.get_merkle(tx_hash, tx_height)
                except BaseException, e:
                    error = str(e) + ': ' + tx_hash
                    print_log("error:", error)

        elif method == 'blockchain.transaction.get':
            try:
                tx_hash = params[0]
                height = params[1]
                result = self.bitcoind('getrawtransaction', [tx_hash, 0, height])
            except BaseException, e:
                error = str(e) + ': ' + tx_hash
                print_log("error:", error)

        else:
            error = "unknown method: %s" % method

        if cache_only and result == -1:
            return -1

        if error:
            response = {'id': message_id, 'error': error}
            self.push_response(response)
        elif result != '':
            response = {'id': message_id, 'result': result}
            self.push_response(response)


    def watch_address(self, addr):
        if addr not in self.watched_addresses:
            self.watched_addresses.append(addr)


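    # Main sync loop: import blocks from bitcoind one by one until our
    # tip matches bitcoind's, reverting our own tip when a reorg is
    # detected (the next block's previousblockhash does not match).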
    def catch_up(self, sync=True):

        t1 = time.time()

        while not self.shared.stopped():

            # are we done yet?
            info = self.bitcoind('getinfo')
            self.bitcoind_height = info.get('blocks')
            bitcoind_block_hash = self.bitcoind('getblockhash', [self.bitcoind_height])
            if self.last_hash == bitcoind_block_hash:
                self.up_to_date = True
                break

            # not done..
            self.up_to_date = False
            next_block_hash = self.bitcoind('getblockhash', [self.height+1])
            next_block = self.bitcoind('getblock', [next_block_hash, 1])

            # fixme: this is unsafe, if we revert when the undo info is not yet written
            revert = (random.randint(1, 100) == 1) if self.is_test else False

            if (next_block.get('previousblockhash') == self.last_hash) and not revert:

                self.import_block(next_block, next_block_hash, self.height+1, sync)
                self.height = self.height + 1
                self.write_header(self.block2header(next_block), sync)
                self.last_hash = next_block_hash

                if self.height % 100 == 0 and not sync:
                    t2 = time.time()
                    print_log("catch_up: block %d (%.3fs)" % (self.height, t2 - t1))
                    t1 = t2

            else:
                # revert current block
                block = self.bitcoind('getblock', [self.last_hash, 1])
                print_log("blockchain reorg", self.height, block.get('previousblockhash'), self.last_hash)
                self.import_block(block, self.last_hash, self.height, sync, revert=True)
                self.pop_header()
                self.flush_headers()

                self.height = self.height - 1

                # read previous header from disk
                self.header = self.read_header(self.height)
                self.last_hash = self.hash_header(self.header)

        self.header = self.block2header(self.bitcoind('getblock', [self.last_hash]))


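    # Tracks unconfirmed transactions: maps each mempool tx to the
    # addresses it touches, rebuilds the per-address mempool histories,
    # and invalidates the history cache of any address whose mempool
    # entry changed.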
    def memorypool_update(self):

        mempool_hashes = self.bitcoind('getrawmempool')

        for tx_hash in mempool_hashes:
            if tx_hash in self.mempool_hashes:
                continue

            tx = self.get_transaction(tx_hash)
            if not tx:
                continue

            for x in tx.get('inputs'):
                txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                try:
                    h160 = self.db.Get(txi)
                    addr = hash_160_to_bc_address(h160)
                except:
                    continue
                l = self.mempool_addresses.get(tx_hash, [])
                if addr not in l:
                    l.append(addr)
                    self.mempool_addresses[tx_hash] = l

            for x in tx.get('outputs'):
                addr = x.get('address')
                l = self.mempool_addresses.get(tx_hash, [])
                if addr not in l:
                    l.append(addr)
                    self.mempool_addresses[tx_hash] = l

            self.mempool_hashes.append(tx_hash)

        # remove older entries from mempool_hashes
        self.mempool_hashes = mempool_hashes

        # remove deprecated entries from mempool_addresses
        for tx_hash, addresses in self.mempool_addresses.items():
            if tx_hash not in self.mempool_hashes:
                self.mempool_addresses.pop(tx_hash)

        # rebuild histories
        new_mempool_hist = {}
        for tx_hash, addresses in self.mempool_addresses.items():
            for addr in addresses:
                h = new_mempool_hist.get(addr, [])
                if tx_hash not in h:
                    h.append(tx_hash)
                new_mempool_hist[addr] = h

        for addr in new_mempool_hist.keys():
            if addr in self.mempool_hist.keys():
                if self.mempool_hist[addr] != new_mempool_hist[addr]:
                    self.invalidate_cache(addr)
            else:
                self.invalidate_cache(addr)

        with self.mempool_lock:
            self.mempool_hist = new_mempool_hist


    def invalidate_cache(self, address):
        with self.cache_lock:
            if address in self.history_cache:
                print_log("cache: invalidating", address)
                self.history_cache.pop(address)

        if address in self.watched_addresses:
            self.address_queue.put(address)


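    # Periodic driver, rescheduled every 10 seconds: catch up with
    # bitcoind, refresh the mempool, then push height/header/address
    # notifications to subscribed clients.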
    def main_iteration(self):

        if self.shared.stopped():
            print_log("blockchain processor terminating")
            return

        with self.dblock:
            t1 = time.time()
            self.catch_up()
            t2 = time.time()

        self.memorypool_update()
        t3 = time.time()
        # print "mempool:", len(self.mempool_addresses), len(self.mempool_hist), "%.3fs"%(t3 - t2)

        if self.sent_height != self.height:
            self.sent_height = self.height
            self.push_response({'id': None, 'method': 'blockchain.numblocks.subscribe', 'params': [self.height]})

        if self.sent_header != self.header:
            print_log("blockchain: %d (%.3fs)" % (self.height, t2 - t1))
            self.sent_header = self.header
            self.push_response({'id': None, 'method': 'blockchain.headers.subscribe', 'params': [self.header]})

        while True:
            try:
                addr = self.address_queue.get(False)
            except:
                break
            if addr in self.watched_addresses:
                status = self.get_status(addr)
                self.push_response({'id': None, 'method': 'blockchain.address.subscribe', 'params': [addr, status]})

        if not self.shared.stopped():
            threading.Timer(10, self.main_iteration).start()
        else:
            print_log("blockchain processor terminating")