password protect unsubscribe
electrum-server.git: backends/bitcoind/blockchain_processor.py
from json import dumps, loads
import leveldb, urllib
import deserialize
import ast, time, threading, hashlib
from Queue import Queue
import traceback, sys, os, random


from util import Hash, hash_encode, hash_decode, rev_hex, int_to_hex
from util import bc_address_to_hash_160, hash_160_to_bc_address, header_to_string, header_from_string
from processor import Processor, print_log

class BlockchainProcessor(Processor):
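    """Index the blockchain into LevelDB (address -> transaction history),
    keep it in sync with bitcoind over JSON-RPC, and answer the blockchain.*
    requests dispatched to this processor."""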

    def __init__(self, config, shared):
        Processor.__init__(self)

        self.shared = shared
        self.config = config
        self.up_to_date = False
        self.watched_addresses = []
        self.history_cache = {}
        self.chunk_cache = {}
        self.cache_lock = threading.Lock()
        self.headers_data = ''

        self.mempool_addresses = {}
        self.mempool_hist = {}
        self.mempool_hashes = []
        self.mempool_lock = threading.Lock()

        self.address_queue = Queue()
        self.dbpath = config.get('leveldb', 'path')

        self.dblock = threading.Lock()
        try:
            self.db = leveldb.LevelDB(self.dbpath)
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (
            config.get('bitcoind','user'),
            config.get('bitcoind','password'),
            config.get('bitcoind','host'),
            config.get('bitcoind','port'))

        self.height = 0
        self.is_test = False
        self.sent_height = 0
        self.sent_header = None


        try:
            hist = self.deserialize(self.db.Get('height'))
            self.last_hash, self.height, _ = hist[0]
            print_log( "hist", hist )
        except:
            #traceback.print_exc(file=sys.stdout)
            print_log('initializing database')
            self.height = 0
            self.last_hash = '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'

        # catch_up headers
        self.init_headers(self.height)

        threading.Timer(0, lambda: self.catch_up(sync=False)).start()
        while not shared.stopped() and not self.up_to_date:
            try:
                time.sleep(1)
            except:
                print "keyboard interrupt: stopping threads"
                shared.stop()
                sys.exit(0)

        print_log( "blockchain is up to date." )

        threading.Timer(10, self.main_iteration).start()


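    # Helper for talking to bitcoind over JSON-RPC: a call like
    # self.bitcoind('getblockhash', [0]) posts a payload such as
    #   {"method": "getblockhash", "params": [0], "id": "jsonrpc"}
    # to bitcoind_url and returns the decoded 'result' field, raising if the
    # response carries an 'error'. (The example values are illustrative only.)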
    def bitcoind(self, method, params=[]):
        postdata = dumps({"method": method, 'params': params, 'id':'jsonrpc'})
        respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] != None:
            raise BaseException(r['error'])
        return r.get('result')


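    # An address history is stored as a concatenation of fixed-size 40-byte
    # records: a 32-byte txid followed by the 4-byte transaction position and
    # the 4-byte block height (both little-endian, per int_to_hex/rev_hex).
    # serialize()/deserialize() convert between that packed form and a list
    # of (txid, txpos, height) tuples.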
    def serialize(self, h):
        s = ''
        for txid, txpos, height in h:
            s += txid + int_to_hex(txpos, 4) + int_to_hex(height, 4)
        return s.decode('hex')


    def deserialize(self, s):
        h = []
        while s:
            txid = s[0:32].encode('hex')
            txpos = int( rev_hex( s[32:36].encode('hex') ), 16 )
            height = int( rev_hex( s[36:40].encode('hex') ), 16 )
            h.append( ( txid, txpos, height ) )
            s = s[40:]
        return h


    def block2header(self, b):
        return {"block_height":b.get('height'), "version":b.get('version'), "prev_block_hash":b.get('previousblockhash'),
                "merkle_root":b.get('merkleroot'), "timestamp":b.get('time'), "bits":int(b.get('bits'),16), "nonce":b.get('nonce')}


    def get_header(self, height):
        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        return self.block2header(b)


    def init_headers(self, db_height):
        self.chunk_cache = {}
        self.headers_filename = os.path.join( self.dbpath, 'blockchain_headers')

        if os.path.exists(self.headers_filename):
            height = os.path.getsize(self.headers_filename)/80 - 1   # the current height
            if height > 0:
                prev_hash = self.hash_header(self.read_header(height))
            else:
                prev_hash = None
        else:
            open(self.headers_filename,'wb').close()
            prev_hash = None
            height = -1

        if height < db_height:
            print_log( "catching up missing headers:", height, db_height)

        try:
            while height < db_height:
                height = height + 1
                header = self.get_header(height)
                if height>1:
                    assert prev_hash == header.get('prev_block_hash')
                self.write_header(header, sync=False)
                prev_hash = self.hash_header(header)
                if height%1000==0: print_log("headers file:",height)
        except KeyboardInterrupt:
            self.flush_headers()
            sys.exit()

        self.flush_headers()


    def hash_header(self, header):
        return rev_hex(Hash(header_to_string(header).decode('hex')).encode('hex'))


    def read_header(self, block_height):
        if os.path.exists(self.headers_filename):
            f = open(self.headers_filename,'rb')
            f.seek(block_height*80)
            h = f.read(80)
            f.close()
            if len(h) == 80:
                h = header_from_string(h)
                return h


    def read_chunk(self, index):
        f = open(self.headers_filename,'rb')
        f.seek(index*2016*80)
        chunk = f.read(2016*80)
        f.close()
        return chunk.encode('hex')


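    # Headers are appended to the flat 'blockchain_headers' file in 80-byte
    # serialized form. write_header() buffers them in self.headers_data and
    # flush_headers() writes the buffer at offset headers_offset * 80, so a
    # reorg can drop the last buffered header with pop_header() before it is
    # flushed to disk.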
    def write_header(self, header, sync=True):
        if not self.headers_data:
            self.headers_offset = header.get('block_height')

        self.headers_data += header_to_string(header).decode('hex')
        if sync or len(self.headers_data) > 40*100:
            self.flush_headers()

    def pop_header(self):
        # we need to do this only if we have not flushed
        if self.headers_data:
            self.headers_data = self.headers_data[:-80]  # a serialized header is 80 bytes

    def flush_headers(self):
        if not self.headers_data: return
        f = open(self.headers_filename,'rb+')
        f.seek(self.headers_offset*80)
        f.write(self.headers_data)
        f.close()
        self.headers_data = ''


    def get_chunk(self, i):
        # store them on disk; store the current chunk in memory
        chunk = self.chunk_cache.get(i)
        if not chunk:
            chunk = self.read_chunk(i)
            self.chunk_cache[i] = chunk
        return chunk


    def get_transaction(self, txid, block_height=-1, is_coinbase = False):
        raw_tx = self.bitcoind('getrawtransaction', [txid, 0, block_height])
        vds = deserialize.BCDataStream()
        vds.write(raw_tx.decode('hex'))
        out = deserialize.parse_Transaction(vds, is_coinbase)
        return out


    def get_history(self, addr, cache_only=False):
        with self.cache_lock: hist = self.history_cache.get( addr )
        if hist is not None: return hist
        if cache_only: return -1

        with self.dblock:
            try:
                hash_160 = bc_address_to_hash_160(addr)
                hist = self.deserialize(self.db.Get(hash_160))
                is_known = True
            except:
                hist = []
                is_known = False

        # should not be necessary
        hist.sort( key=lambda tup: tup[1])
        # check uniqueness too...

        # add memory pool
        with self.mempool_lock:
            for txid in self.mempool_hist.get(addr,[]):
                hist.append((txid, 0, 0))

        hist = map(lambda x: {'tx_hash':x[0], 'height':x[2]}, hist)
        # add something to distinguish between unused and empty addresses
        if hist == [] and is_known: hist = ['*']

        with self.cache_lock: self.history_cache[addr] = hist
        return hist


    def get_status(self, addr, cache_only=False):
        tx_points = self.get_history(addr, cache_only)
        if cache_only and tx_points == -1: return -1

        if not tx_points: return None
        if tx_points == ['*']: return '*'
        status = ''
        for tx in tx_points:
            status += tx.get('tx_hash') + ':%d:' % tx.get('height')
        return hashlib.sha256( status ).digest().encode('hex')


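    # Build the SPV merkle proof for a transaction: hash the block's txids
    # pairwise level by level (duplicating the last hash on odd levels) and
    # record the sibling hash of the branch containing the target tx. The
    # client can recompute the merkle root from tx_hash, 'merkle' and 'pos'.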
    def get_merkle(self, tx_hash, height):

        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        tx_list = b.get('tx')
        tx_pos = tx_list.index(tx_hash)

        merkle = map(hash_decode, tx_list)
        target_hash = hash_decode(tx_hash)
        s = []
        while len(merkle) != 1:
            if len(merkle)%2: merkle.append( merkle[-1] )
            n = []
            while merkle:
                new_hash = Hash( merkle[0] + merkle[1] )
                if merkle[0] == target_hash:
                    s.append( hash_encode( merkle[1]))
                    target_hash = new_hash
                elif merkle[1] == target_hash:
                    s.append( hash_encode( merkle[0]))
                    target_hash = new_hash
                n.append( new_hash )
                merkle = merkle[2:]
            merkle = n

        return {"block_height":height, "merkle":s, "pos":tx_pos}


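    # While a block is being imported, self.batch_list maps a 20-byte
    # hash_160 to that address's serialized history being rewritten, and
    # self.batch_txio maps a serialized outpoint (txid + output index) back
    # to the address it pays, so spending inputs can be resolved.
    # add_to_history() inserts a 40-byte record keeping the history ordered
    # by block height; remove_from_history() deletes one record and returns
    # the (height, address) pair needed for undo information.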
    def add_to_history(self, addr, tx_hash, tx_pos, tx_height):

        # keep it sorted
        s = (tx_hash + int_to_hex(tx_pos, 4) + int_to_hex(tx_height, 4)).decode('hex')

        serialized_hist = self.batch_list[addr]

        l = len(serialized_hist)/40
        for i in range(l-1, -1, -1):
            item = serialized_hist[40*i:40*(i+1)]
            item_height = int( rev_hex( item[36:40].encode('hex') ), 16 )
            if item_height < tx_height:
                serialized_hist = serialized_hist[0:40*(i+1)] + s + serialized_hist[40*(i+1):]
                break
        else:
            serialized_hist = s + serialized_hist

        self.batch_list[addr] = serialized_hist

        # backlink
        txo = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')
        self.batch_txio[txo] = addr


    def remove_from_history(self, addr, tx_hash, tx_pos):

        txi = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')

        if addr is None:
            try:
                addr = self.batch_txio[txi]
            except:
                raise BaseException(tx_hash, tx_pos)

        serialized_hist = self.batch_list[addr]

        l = len(serialized_hist)/40
        for i in range(l):
            item = serialized_hist[40*i:40*(i+1)]
            if item[0:36] == txi:
                height = int( rev_hex( item[36:40].encode('hex') ), 16 )
                serialized_hist = serialized_hist[0:40*i] + serialized_hist[40*(i+1):]
                break
        else:
            hist = self.deserialize(serialized_hist)
            raise BaseException("prevout not found", addr, hist, tx_hash, tx_pos)

        self.batch_list[addr] = serialized_hist
        return height, addr


    def deserialize_block(self, block):
        txlist = block.get('tx')
        tx_hashes = []  # ordered txids
        txdict = {}     # deserialized tx
        is_coinbase = True
        for raw_tx in txlist:
            tx_hash = hash_encode(Hash(raw_tx.decode('hex')))
            tx_hashes.append(tx_hash)
            vds = deserialize.BCDataStream()
            vds.write(raw_tx.decode('hex'))
            tx = deserialize.parse_Transaction(vds, is_coinbase)
            txdict[tx_hash] = tx
            is_coinbase = False
        return tx_hashes, txdict

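    # Undo information is only written near the chain tip (last ~100 blocks)
    # under the rotating key "undo<height mod 100>". It records, per
    # transaction, the (height, address) pairs of the spent prevouts so that
    # import_block(revert=True) can restore histories during a reorg.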
    def get_undo_info(self, height):
        s = self.db.Get("undo%d"%(height%100))
        # parse the repr() written by write_undo_info; literal_eval avoids eval()
        return ast.literal_eval(s)

    def write_undo_info(self, batch, height, undo_info):
        if self.is_test or height > self.bitcoind_height - 100:
            batch.Put("undo%d"%(height%100), repr(undo_info))


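    # import_block applies (or, with revert=True, unapplies) one block in
    # four phases: deserialize the transactions, read the affected address
    # histories and prevout backlinks from LevelDB, update them in memory
    # (recording undo info for new blocks), then write everything back in a
    # single WriteBatch together with the new chain tip under 'height'.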
    def import_block(self, block, block_hash, block_height, sync, revert=False):

        self.batch_list = {}  # address -> history
        self.batch_txio = {}  # transaction i/o -> address

        block_inputs = []
        block_outputs = []
        addr_to_read = []

        # deserialize transactions
        t0 = time.time()
        tx_hashes, txdict = self.deserialize_block(block)

        t00 = time.time()


        if not revert:
            # read addresses of tx inputs
            for tx in txdict.values():
                for x in tx.get('inputs'):
                    txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                    block_inputs.append(txi)

            block_inputs.sort()
            for txi in block_inputs:
                try:
                    addr = self.db.Get(txi)
                except:
                    # the input could come from the same block
                    continue
                self.batch_txio[txi] = addr
                addr_to_read.append(addr)

        else:
            for txid, tx in txdict.items():
                for x in tx.get('outputs'):
                    txo = (txid + int_to_hex(x.get('index'), 4)).decode('hex')
                    block_outputs.append(txo)

        # read histories of addresses
        for txid, tx in txdict.items():
            for x in tx.get('outputs'):
                hash_160 = bc_address_to_hash_160(x.get('address'))
                addr_to_read.append(hash_160)

        addr_to_read.sort()
        for addr in addr_to_read:
            try:
                self.batch_list[addr] = self.db.Get(addr)
            except:
                self.batch_list[addr] = ''


        if revert:
            undo_info = self.get_undo_info(block_height)
            # print "undo", block_height, undo_info
        else: undo_info = {}

        # process
        t1 = time.time()

        if revert: tx_hashes = tx_hashes[::-1]
        for txid in tx_hashes: # must be ordered
            tx = txdict[txid]
            if not revert:

                undo = []
                for x in tx.get('inputs'):
                    prevout_height, prevout_addr = self.remove_from_history( None, x.get('prevout_hash'), x.get('prevout_n'))
                    undo.append( (prevout_height, prevout_addr) )
                undo_info[txid] = undo

                for x in tx.get('outputs'):
                    hash_160 = bc_address_to_hash_160(x.get('address'))
                    self.add_to_history( hash_160, txid, x.get('index'), block_height)

            else:
                for x in tx.get('outputs'):
                    hash_160 = bc_address_to_hash_160(x.get('address'))
                    self.remove_from_history( hash_160, txid, x.get('index'))

                i = 0
                for x in tx.get('inputs'):
                    prevout_height, prevout_addr = undo_info.get(txid)[i]
                    i += 1

                    # read the history into batch list
                    if self.batch_list.get(prevout_addr) is None:
                        self.batch_list[prevout_addr] = self.db.Get(prevout_addr)

                    # re-add them to the history
                    self.add_to_history( prevout_addr, x.get('prevout_hash'), x.get('prevout_n'), prevout_height)
                    print_log( "new hist for", hash_160_to_bc_address(prevout_addr), self.deserialize(self.batch_list[prevout_addr]) )

        # write
        max_len = 0
        max_addr = ''
        t2 = time.time()

        batch = leveldb.WriteBatch()
        for addr, serialized_hist in self.batch_list.items():
            batch.Put(addr, serialized_hist)
            l = len(serialized_hist)
            if l > max_len:
                max_len = l
                max_addr = addr

        if not revert:
            # add newly created outputs
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete spent inputs
            for txi in block_inputs:
                batch.Delete(txi)
            # add undo info
            self.write_undo_info(batch, block_height, undo_info)
        else:
            # restore spent inputs
            for txio, addr in self.batch_txio.items():
                batch.Put(txio, addr)
            # delete spent outputs
            for txo in block_outputs:
                batch.Delete(txo)


        # store the new chain tip (block hash and height) under the 'height' key
        batch.Put('height', self.serialize( [(block_hash, block_height, 0)] ) )

        # actual write
        self.db.Write(batch, sync = sync)

        t3 = time.time()
        if t3 - t0 > 10 and not sync:
            print_log("block", block_height,
                      "parse:%0.2f "%(t00 - t0),
                      "read:%0.2f "%(t1 - t00),
                      "proc:%.2f "%(t2-t1),
                      "write:%.2f "%(t3-t2),
                      "max:", max_len, hash_160_to_bc_address(max_addr))

        for h160 in self.batch_list.keys():
            addr = hash_160_to_bc_address(h160)
            self.invalidate_cache(addr)


    def add_request(self, request):
        # see if we can get it from the cache; if not, add it to the queue
        if self.process( request, cache_only = True) == -1:
            self.queue.put(request)


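    # Dispatch one client request. The new blockchain.address.unsubscribe
    # method is password protected: params are [password, address] and the
    # password must match config.get('server', 'password'). An illustrative
    # request (placeholder values):
    #   {"id": 7, "method": "blockchain.address.unsubscribe",
    #    "params": ["<server password>", "<address>"]}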
    def process(self, request, cache_only = False):
        #print "abe process", request

        message_id = request['id']
        method = request['method']
        params = request.get('params',[])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.height

        elif method == 'blockchain.headers.subscribe':
            result = self.header

        elif method == 'blockchain.address.subscribe':
            try:
                address = params[0]
                result = self.get_status(address, cache_only)
                self.watch_address(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log( "error:", error )

        elif method == 'blockchain.address.unsubscribe':
            try:
                password = params[0]
                address = params[1]
                if password == self.config.get('server','password'):
                    self.watched_addresses.remove(address)
                    print_log('unsubscribed', address)
                    result = "ok"
                else:
                    print_log('incorrect password')
                    result = "authentication error"
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log( "error:", error )

        elif method == 'blockchain.address.get_history':
            try:
                address = params[0]
                result = self.get_history( address, cache_only )
            except BaseException, e:
                error = str(e) + ': ' + address
                print_log( "error:", error )

        elif method == 'blockchain.block.get_header':
            if cache_only:
                result = -1
            else:
                try:
                    height = params[0]
                    result = self.get_header( height )
                except BaseException, e:
                    error = str(e) + ': %d'% height
                    print_log( "error:", error )

        elif method == 'blockchain.block.get_chunk':
            if cache_only:
                result = -1
            else:
                try:
                    index = params[0]
                    result = self.get_chunk( index )
                except BaseException, e:
                    error = str(e) + ': %d'% index
                    print_log( "error:", error)

        elif method == 'blockchain.transaction.broadcast':
            txo = self.bitcoind('sendrawtransaction', params)
            print_log( "sent tx:", txo )
            result = txo

        elif method == 'blockchain.transaction.get_merkle':
            if cache_only:
                result = -1
            else:
                try:
                    tx_hash = params[0]
                    tx_height = params[1]
                    result = self.get_merkle(tx_hash, tx_height)
                except BaseException, e:
                    error = str(e) + ': ' + tx_hash
                    print_log( "error:", error )

        elif method == 'blockchain.transaction.get':
            try:
                tx_hash = params[0]
                height = params[1]
                result = self.bitcoind('getrawtransaction', [tx_hash, 0, height] )
            except BaseException, e:
                error = str(e) + ': ' + tx_hash
                print_log( "error:", error )

        else:
            error = "unknown method:%s"%method

        if cache_only and result == -1: return -1

        if error:
            response = { 'id':message_id, 'error':error }
            self.push_response(response)
        elif result != '':
            response = { 'id':message_id, 'result':result }
            self.push_response(response)


    def watch_address(self, addr):
        if addr not in self.watched_addresses:
            self.watched_addresses.append(addr)


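    # Poll bitcoind and import blocks one at a time until our tip matches
    # bitcoind's. If the next block does not build on our last hash, the
    # current tip is reverted using the stored undo info and the loop walks
    # back until the chains reconnect, which handles small reorgs.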
    def catch_up(self, sync = True):

        t1 = time.time()

        while not self.shared.stopped():

            # are we done yet?
            info = self.bitcoind('getinfo')
            self.bitcoind_height = info.get('blocks')
            bitcoind_block_hash = self.bitcoind('getblockhash', [self.bitcoind_height])
            if self.last_hash == bitcoind_block_hash:
                self.up_to_date = True
                break

            # not done..
            self.up_to_date = False
            next_block_hash = self.bitcoind('getblockhash', [self.height+1])
            next_block = self.bitcoind('getblock', [next_block_hash, 1])

            # fixme: this is unsafe, if we revert when the undo info is not yet written
            revert = (random.randint(1, 100)==1) if self.is_test else False

            if (next_block.get('previousblockhash') == self.last_hash) and not revert:

                self.import_block(next_block, next_block_hash, self.height+1, sync)
                self.height = self.height + 1
                self.write_header(self.block2header(next_block), sync)
                self.last_hash = next_block_hash

                if (self.height)%100 == 0 and not sync:
                    t2 = time.time()
                    print_log( "catch_up: block %d (%.3fs)"%( self.height, t2 - t1 ) )
                    t1 = t2

            else:
                # revert current block
                block = self.bitcoind('getblock', [self.last_hash, 1])
                print_log( "blockchain reorg", self.height, block.get('previousblockhash'), self.last_hash )
                self.import_block(block, self.last_hash, self.height, sync, revert=True)
                self.pop_header()
                self.flush_headers()

                self.height = self.height -1

                # read previous header from disk
                self.header = self.read_header(self.height)
                self.last_hash = self.hash_header(self.header)


        self.header = self.block2header(self.bitcoind('getblock', [self.last_hash]))


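    # Track unconfirmed transactions: for every tx in bitcoind's mempool,
    # record the addresses funding its inputs (via the outpoint -> address
    # backlinks in the database) and the addresses of its outputs, rebuild
    # the per-address mempool history, and invalidate the status cache of
    # every address whose unconfirmed history changed.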
    def memorypool_update(self):

        mempool_hashes = self.bitcoind('getrawmempool')

        for tx_hash in mempool_hashes:
            if tx_hash in self.mempool_hashes: continue

            tx = self.get_transaction(tx_hash)
            if not tx: continue

            for x in tx.get('inputs'):
                txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                try:
                    h160 = self.db.Get(txi)
                    addr = hash_160_to_bc_address(h160)
                except:
                    continue
                l = self.mempool_addresses.get(tx_hash, [])
                if addr not in l:
                    l.append( addr )
                    self.mempool_addresses[tx_hash] = l

            for x in tx.get('outputs'):
                addr = x.get('address')
                l = self.mempool_addresses.get(tx_hash, [])
                if addr not in l:
                    l.append( addr )
                    self.mempool_addresses[tx_hash] = l

            self.mempool_hashes.append(tx_hash)

        # remove older entries from mempool_hashes
        self.mempool_hashes = mempool_hashes

        # remove deprecated entries from mempool_addresses
        for tx_hash, addresses in self.mempool_addresses.items():
            if tx_hash not in self.mempool_hashes:
                self.mempool_addresses.pop(tx_hash)

        # rebuild histories
        new_mempool_hist = {}
        for tx_hash, addresses in self.mempool_addresses.items():
            for addr in addresses:
                h = new_mempool_hist.get(addr, [])
                if tx_hash not in h:
                    h.append( tx_hash )
                new_mempool_hist[addr] = h

        for addr in new_mempool_hist.keys():
            if addr in self.mempool_hist.keys():
                if self.mempool_hist[addr] != new_mempool_hist[addr]:
                    self.invalidate_cache(addr)
            else:
                self.invalidate_cache(addr)

        with self.mempool_lock:
            self.mempool_hist = new_mempool_hist


    def invalidate_cache(self, address):
        with self.cache_lock:
            if self.history_cache.has_key(address):
                print_log( "cache: invalidating", address )
                self.history_cache.pop(address)

        if address in self.watched_addresses:
            self.address_queue.put(address)


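    # Main loop, rescheduled every 10 seconds with threading.Timer: catch up
    # with bitcoind, refresh the mempool view, then push numblocks / headers
    # notifications and an address.subscribe status update for every watched
    # address whose cached history was invalidated.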
    def main_iteration(self):

        if self.shared.stopped():
            print_log( "blockchain processor terminating" )
            return

        with self.dblock:
            t1 = time.time()
            self.catch_up()
            t2 = time.time()

        self.memorypool_update()
        t3 = time.time()
        # print "mempool:", len(self.mempool_addresses), len(self.mempool_hist), "%.3fs"%(t3 - t2)


        if self.sent_height != self.height:
            self.sent_height = self.height
            self.push_response({ 'id': None, 'method':'blockchain.numblocks.subscribe', 'params':[self.height] })

        if self.sent_header != self.header:
            print_log( "blockchain: %d (%.3fs)"%( self.height, t2 - t1 ) )
            self.sent_header = self.header
            self.push_response({ 'id': None, 'method':'blockchain.headers.subscribe', 'params':[self.header] })

        while True:
            try:
                addr = self.address_queue.get(False)
            except:
                break
            if addr in self.watched_addresses:
                status = self.get_status( addr )
                self.push_response({ 'id': None, 'method':'blockchain.address.subscribe', 'params':[addr, status] })

        if not self.shared.stopped():
            threading.Timer(10, self.main_iteration).start()
        else:
            print_log( "blockchain processor terminating" )