from Abe.util import hash_to_address, decode_check_address
from Abe.DataStore import DataStore as Datastore_class
from Abe import DataStore, readconf, BCDataStream, deserialize, util, base58

import thread, traceback, sys, urllib, operator
from json import dumps, loads
from Queue import Queue
import time, threading


class AbeStore(Datastore_class):

    def __init__(self, config):
        conf = DataStore.CONFIG_DEFAULTS
        args, argv = readconf.parse_argv([], conf)
        args.dbtype = config.get('database', 'type')
        if args.dbtype == 'sqlite3':
            args.connect_args = { 'database': config.get('database', 'database') }
        elif args.dbtype == 'MySQLdb':
            args.connect_args = { 'db': config.get('database', 'database'), 'user': config.get('database', 'username'), 'passwd': config.get('database', 'password') }
        elif args.dbtype == 'psycopg2':
            args.connect_args = { 'database': config.get('database', 'database') }

        Datastore_class.__init__(self, args)

        # maximum number of rows returned by the address queries below
        self.sql_limit = int(config.get('database', 'limit'))

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (
            config.get('bitcoind', 'user'), config.get('bitcoind', 'password'),
            config.get('bitcoind', 'host'), config.get('bitcoind', 'port'))

        # per-address history cache and queue of addresses whose cache was invalidated
        self.tx_cache = {}
        self.address_queue = Queue()

        # serializes database access between the request thread and the timer thread
        self.dblock = thread.allocate_lock()
        # highest tx_id imported so far; used to discard stale mempool rows
        self.last_tx_id = 0

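    # Illustrative config layout implied by the config.get() calls above (section and
    # option names are taken from the code; the values are examples only):
    #
    #   [database]
    #   type = sqlite3            ; or MySQLdb / psycopg2
    #   database = abe.sqlite
    #   username =                ; MySQLdb only
    #   password =                ; MySQLdb only
    #   limit = 100
    #
    #   [bitcoind]
    #   user = rpcuser
    #   password = rpcpassword
    #   host = localhost
    #   port = 8332
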
    def import_tx(self, tx, is_coinbase):
        tx_id = super(AbeStore, self).import_tx(tx, is_coinbase)
        self.last_tx_id = tx_id
        return tx_id

    def import_block(self, b, chain_ids=frozenset()):
        block_id = super(AbeStore, self).import_block(b, chain_ids)
        for pos in xrange(len(b['transactions'])):
            tx = b['transactions'][pos]
            if 'hash' not in tx:
                tx['hash'] = util.double_sha256(tx['tx'])
            tx_id = self.tx_find_id_and_value(tx)
            if tx_id:
                self.update_tx_cache(tx_id)
            else:
                print "error: import_block: no tx_id"
        return block_id

    def update_tx_cache(self, txid):
        inrows = self.get_tx_inputs(txid, False)
        for row in inrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print "WARNING: missing tx_in for tx", txid
                continue
            address = hash_to_address(chr(0), _hash)
            if address in self.tx_cache:
                print "cache: invalidating", address
                self.tx_cache.pop(address)
            self.address_queue.put(address)

        outrows = self.get_tx_outputs(txid, False)
        for row in outrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print "WARNING: missing tx_out for tx", txid
                continue
            address = hash_to_address(chr(0), _hash)
            if address in self.tx_cache:
                print "cache: invalidating", address
                self.tx_cache.pop(address)
            self.address_queue.put(address)

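    # Every address touched by an imported transaction is dropped from tx_cache (so the
    # next get_history() call recomputes it) and pushed onto address_queue, which the
    # BlockchainProcessor timer thread drains to notify 'blockchain.address.subscribe'
    # clients.  hash_to_address(chr(0), ...) assumes mainnet pay-to-pubkey-hash addresses.
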
    def safe_sql(self, sql, params=(), lock=True):
        # run a query while holding dblock (unless the caller already holds it)
        try:
            if lock: self.dblock.acquire()
            return self.selectall(sql, params)
        except:
            traceback.print_exc(file=sys.stdout)
            raise BaseException('sql error')
        finally:
            if lock: self.dblock.release()

    def get_tx_outputs(self, tx_id, lock=True):
        return self.safe_sql("""SELECT
                txout.txout_pos,
                txout.txout_scriptPubKey,
                txout.txout_value,
                nexttx.tx_hash,
                nexttx.tx_id,
                txin.txin_pos,
                pubkey.pubkey_hash
              FROM txout
              LEFT JOIN txin ON (txin.txout_id = txout.txout_id)
              LEFT JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
              LEFT JOIN tx nexttx ON (txin.tx_id = nexttx.tx_id)
             WHERE txout.tx_id = %d
             ORDER BY txout.txout_pos
        """ % (tx_id), (), lock)

    def get_tx_inputs(self, tx_id, lock=True):
        return self.safe_sql(""" SELECT
                txin.txin_pos,
                txin.txin_scriptSig,
                txout.txout_value,
                COALESCE(prevtx.tx_hash, u.txout_tx_hash),
                prevtx.tx_id,
                COALESCE(txout.txout_pos, u.txout_pos),
                pubkey.pubkey_hash
              FROM txin
              LEFT JOIN txout ON (txout.txout_id = txin.txout_id)
              LEFT JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
              LEFT JOIN tx prevtx ON (txout.tx_id = prevtx.tx_id)
              LEFT JOIN unlinked_txin u ON (u.txin_id = txin.txin_id)
             WHERE txin.tx_id = %d
             ORDER BY txin.txin_pos
             """ % (tx_id,), (), lock)

    def get_address_out_rows(self, dbhash):
        # confirmed history rows where the address is spent (inputs), longest chain only
        out = self.safe_sql(""" SELECT
                        b.block_nTime,
                        cc.chain_id,
                        b.block_height,
                        1,
                        b.block_hash,
                        tx.tx_hash,
                        tx.tx_id,
                        txin.txin_pos,
                       -prevout.txout_value
                      FROM chain_candidate cc
                      JOIN block b ON (b.block_id = cc.block_id)
                      JOIN block_tx ON (block_tx.block_id = b.block_id)
                      JOIN tx ON (tx.tx_id = block_tx.tx_id)
                      JOIN txin ON (txin.tx_id = tx.tx_id)
                      JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
                      JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
                     WHERE pubkey.pubkey_hash = ?
                       AND cc.in_longest = 1
                     LIMIT ? """, (dbhash, self.sql_limit))
        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_out_rows_memorypool(self, dbhash):
        # unconfirmed rows where the address is spent (inputs)
        out = self.safe_sql(""" SELECT
                      1,
                      tx.tx_hash,
                      tx.tx_id,
                      txin.txin_pos,
                     -prevout.txout_value
                    FROM tx
                    JOIN txin ON (txin.tx_id = tx.tx_id)
                    JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
                    JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
                    WHERE pubkey.pubkey_hash = ?
                    LIMIT ? """, (dbhash, self.sql_limit))
        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_in_rows(self, dbhash):
        # confirmed history rows where the address receives funds (outputs), longest chain only
        out = self.safe_sql(""" SELECT
                        b.block_nTime,
                        cc.chain_id,
                        b.block_height,
                        0,
                        b.block_hash,
                        tx.tx_hash,
                        tx.tx_id,
                        txout.txout_pos,
                        txout.txout_value
                      FROM chain_candidate cc
                      JOIN block b ON (b.block_id = cc.block_id)
                      JOIN block_tx ON (block_tx.block_id = b.block_id)
                      JOIN tx ON (tx.tx_id = block_tx.tx_id)
                      JOIN txout ON (txout.tx_id = tx.tx_id)
                      JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
                     WHERE pubkey.pubkey_hash = ?
                       AND cc.in_longest = 1
                     LIMIT ? """, (dbhash, self.sql_limit))
        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_in_rows_memorypool(self, dbhash):
        # unconfirmed rows where the address receives funds (outputs)
        out = self.safe_sql(""" SELECT
                     0,
                     tx.tx_hash,
                     tx.tx_id,
                     txout.txout_pos,
                     txout.txout_value
                    FROM tx
                    JOIN txout ON (txout.tx_id = tx.tx_id)
                    JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
                    WHERE pubkey.pubkey_hash = ?
                    LIMIT ? """, (dbhash, self.sql_limit))
        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

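    # The four queries above come in pairs: the *_out_rows queries report debits (the
    # address appears in a txin, value returned negative), the *_in_rows queries report
    # credits (the address appears in a txout), once against the longest chain and once
    # against transactions not yet in a block.  Each raises 'limit reached' instead of
    # silently truncating when sql_limit rows come back.
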
    def get_history(self, addr):
        # return the cached history if we have one
        cached_version = self.tx_cache.get(addr)
        if cached_version is not None:
            return cached_version

        version, binaddr = decode_check_address(addr)
        if binaddr is None:
            return None
        dbhash = self.binin(binaddr)

        # confirmed history
        rows = []
        rows += self.get_address_out_rows(dbhash)
        rows += self.get_address_in_rows(dbhash)

        txpoints = []
        known_tx = []
        for row in rows:
            try:
                nTime, chain_id, height, is_in, blk_hash, tx_hash, tx_id, pos, value = row
            except:
                print "cannot unpack row", row
                break
            tx_hash = self.hashout_hex(tx_hash)
            txpoint = {
                "timestamp": int(nTime),
                "height": int(height),
                "is_input": int(is_in),
                "block_hash": self.hashout_hex(blk_hash),
                "tx_hash": tx_hash,
                "tx_id": int(tx_id),
                "index": int(pos),
                "value": int(value),
            }
            txpoints.append(txpoint)
            known_tx.append(tx_hash)

        # todo: sort them really...
        txpoints = sorted(txpoints, key=operator.itemgetter("timestamp"))

        # unconfirmed (memory pool) history
        rows = []
        rows += self.get_address_in_rows_memorypool(dbhash)
        rows += self.get_address_out_rows_memorypool(dbhash)
        address_has_mempool = False
        for row in rows:
            is_in, tx_hash, tx_id, pos, value = row
            tx_hash = self.hashout_hex(tx_hash)
            if tx_hash in known_tx:
                continue
            # this means that pending transactions were added to the db, even if they are not returned by getmemorypool
            address_has_mempool = True
            # discard transactions that are too old
            if self.last_tx_id - tx_id > 10000:
                print "discarding tx id", tx_id
                continue
            #print "mempool", tx_hash
            txpoint = {
                "timestamp": 0,
                "height": 0,
                "is_input": int(is_in),
                "block_hash": 'mempool',
                "tx_hash": tx_hash,
                "tx_id": int(tx_id),
                "index": int(pos),
                "value": int(value),
            }
            txpoints.append(txpoint)

        # resolve the input and output addresses of every transaction in the history
        for txpoint in txpoints:
            tx_id = txpoint['tx_id']

            txinputs = []
            inrows = self.get_tx_inputs(tx_id)
            for row in inrows:
                _hash = self.binout(row[6])
                if not _hash:
                    #print "WARNING: missing tx_in for tx", tx_id, addr
                    continue
                txinputs.append(hash_to_address(chr(0), _hash))
            txpoint['inputs'] = txinputs

            txoutputs = []
            outrows = self.get_tx_outputs(tx_id)
            for row in outrows:
                _hash = self.binout(row[6])
                if not _hash:
                    #print "WARNING: missing tx_out for tx", tx_id, addr
                    continue
                txoutputs.append(hash_to_address(chr(0), _hash))
            txpoint['outputs'] = txoutputs

            # for all unspent outputs to this address, also return the scriptPubKey
            # (the client could actually deduce it from the address)
            if not txpoint['is_input']:
                # detect whether the output has already been redeemed
                row = None
                for r in outrows:
                    if r[6] == dbhash:
                        row = r
                        break
                # row layout: pos, script, value, next_hash, next_id, next_pos, pubkey_hash
                # if there is no spending transaction, add the script
                if row and not row[4]:
                    txpoint['raw_output_script'] = row[1]

        # do not cache mempool results because statuses are ambiguous
        if not address_has_mempool:
            self.tx_cache[addr] = txpoints
        return txpoints

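    # Shape of one history entry produced above (values illustrative):
    #   { "timestamp": 1321000000, "height": 154000, "is_input": 0,
    #     "block_hash": "0000...beef" (or 'mempool'), "tx_hash": "ab12...",
    #     "tx_id": 42, "index": 0, "value": 5000000,
    #     "inputs": ["1Addr..."], "outputs": ["1Addr..."],
    #     "raw_output_script": "..." }   # only present for outputs not yet redeemed
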
    def get_status(self, addr):
        # get the address status, i.e. the hash of the last block seen for that address
        tx_points = self.get_history(addr)
        if not tx_points:
            return None
        lastpoint = tx_points[-1]
        status = lastpoint['block_hash']
        # this is a temporary hack; move it up once old clients have disappeared
        if status == 'mempool':  # and session['version'] != "old":
            status = status + ':%d' % len(tx_points)
        return status

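    # The status string doubles as a change detector for subscribers: it only changes
    # when a new block touches the address, or, for unconfirmed activity, when the number
    # of history entries changes, hence the 'mempool:<count>' suffix above.
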
    def memorypool_update(store):
        ds = BCDataStream.BCDataStream()
        postdata = dumps({"method": 'getmemorypool', 'params': [], 'id': 'jsonrpc'})
        respdata = urllib.urlopen(store.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] is not None:
            return

        v = r['result'].get('transactions')
        for hextx in v:
            ds.write(hextx.decode('hex'))
            tx = deserialize.parse_Transaction(ds)
            tx['hash'] = util.double_sha256(tx['tx'])
            tx_hash = store.hashin(tx['hash'])
            # skip transactions that are already in the database
            if store.tx_find_id_and_value(tx):
                continue
            tx_id = store.import_tx(tx, False)
            store.update_tx_cache(tx_id)

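    # memorypool_update() polls bitcoind's 'getmemorypool' JSON-RPC call, deserializes
    # each hex-encoded transaction with Abe's BCDataStream/deserialize helpers, and
    # imports any transaction not yet in the database so that get_history() can report
    # unconfirmed activity.  It assumes a bitcoind that still exposes 'getmemorypool'.
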
    def send_tx(self, tx):
        postdata = dumps({"method": 'importtransaction', 'params': [tx], 'id': 'jsonrpc'})
        respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] is not None:
            msg = r['error'].get('message')
            out = "error: transaction rejected by memorypool: " + msg + "\n" + tx
        else:
            out = r['result']
        return out

    def main_iteration(store):
        with store.dblock:
            store.catch_up()
            store.memorypool_update()
            block_number = store.get_block_number(1)
        return block_number

    def catch_up(store):
        # if there is an exception, do rollback and then re-raise the exception
        for dircfg in store.datadirs:
            try:
                store.catch_up_dir(dircfg)
            except Exception, e:
                store.log.exception("Failed to catch up %s", dircfg)
                store.rollback()
                raise

from processor import Processor


class BlockchainProcessor(Processor):

    def __init__(self, config):
        Processor.__init__(self)
        self.store = AbeStore(config)
        self.block_number = -1
        self.watched_addresses = []
        threading.Timer(10, self.run_store_iteration).start()

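    # run_store_iteration re-arms this 10-second threading.Timer on every pass, so the
    # store is caught up and the memory pool refreshed roughly every ten seconds on a
    # thread separate from the one answering client requests.
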
    def process(self, request):
        #print "abe process", request
        message_id = request['id']
        method = request['method']
        params = request.get('params', [])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.block_number

        elif method == 'blockchain.address.subscribe':
            address = params[0]
            try:
                result = self.store.get_status(address)
                self.watch_address(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print "error:", error

        elif method == 'blockchain.address.get_history':
            address = params[0]
            try:
                result = self.store.get_history(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print "error:", error

        elif method == 'blockchain.transaction.broadcast':
            txo = self.store.send_tx(params[0])
            print "sent tx:", txo
            result = txo

        else:
            error = "unknown method:%s" % method

        if error:
            response = { 'id': message_id, 'error': error }
            self.push_response(response)
        else:
            response = { 'id': message_id, 'result': result }
            self.push_response(response)

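    # Illustrative request/response pair for the handler above (example values):
    #   request:  {"id": 7, "method": "blockchain.address.subscribe",
    #              "params": ["1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa"]}
    #   response: {"id": 7, "result": "<block hash or 'mempool:<n>'>"}
    # Errors are reported as {"id": ..., "error": "<message>"} instead of a result.
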
    def watch_address(self, addr):
        if addr not in self.watched_addresses:
            self.watched_addresses.append(addr)

    def run_store_iteration(self):
        try:
            block_number = self.store.main_iteration()
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        if self.shared.stopped():
            return

        if self.block_number != block_number:
            self.block_number = block_number
            print "block number:", self.block_number
            self.push_response({ 'method': 'blockchain.numblocks.subscribe', 'params': [self.block_number] })

        # notify subscribers about addresses whose history cache was invalidated
        while True:
            try:
                addr = self.store.address_queue.get(False)
            except:
                break
            if addr in self.watched_addresses:
                status = self.store.get_status(addr)
                self.push_response({ 'method': 'blockchain.address.subscribe', 'params': [addr, status] })

        threading.Timer(10, self.run_store_iteration).start()