from Abe.util import hash_to_address, decode_check_address
from Abe.DataStore import DataStore as Datastore_class
from Abe import DataStore, readconf, BCDataStream, deserialize, util, base58

import thread, traceback, sys, urllib, operator
from json import dumps, loads
from Queue import Queue, Empty
import time, threading

# Bitcoin mainnet pay-to-pubkey-hash version byte, used by hash_to_address()
addrtype = 0

class AbeStore(Datastore_class):

    def __init__(self, config):
        conf = DataStore.CONFIG_DEFAULTS
        args, argv = readconf.parse_argv([], conf)
        args.dbtype = config.get('database', 'type')
        if args.dbtype == 'sqlite3':
            args.connect_args = { 'database': config.get('database', 'database') }
        elif args.dbtype == 'MySQLdb':
            args.connect_args = { 'db': config.get('database', 'database'), 'user': config.get('database', 'username'), 'passwd': config.get('database', 'password') }
        elif args.dbtype == 'psycopg2':
            args.connect_args = { 'database': config.get('database', 'database') }

        Datastore_class.__init__(self, args)

        # cap on the number of rows returned by the address queries below
        self.sql_limit = int(config.get('database', 'limit'))

        # per-address history cache, invalidated by update_tx_cache()
        self.tx_cache = {}
        self.last_tx_id = 0

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (config.get('bitcoind', 'user'), config.get('bitcoind', 'password'), config.get('bitcoind', 'host'), config.get('bitcoind', 'port'))

        # addresses whose cached history was invalidated; drained by the
        # processor's run_store_iteration()
        self.address_queue = Queue()

        self.dblock = thread.allocate_lock()

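    # The constructor above reads a ConfigParser-style file. A minimal
    # example (the section and option names come straight from the
    # config.get() calls above; the values are illustrative only):
    #
    #   [database]
    #   type = sqlite3
    #   database = abe.sqlite
    #   limit = 100
    #
    #   [bitcoind]
    #   user = rpcuser
    #   password = rpcpassword
    #   host = localhost
    #   port = 8332
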
    def import_tx(self, tx, is_coinbase):
        tx_id = super(AbeStore, self).import_tx(tx, is_coinbase)
        # remember the newest tx_id; get_history() uses it to age out
        # stale unconfirmed transactions
        self.last_tx_id = tx_id
        return tx_id

    def import_block(self, b, chain_ids=frozenset()):
        block_id = super(AbeStore, self).import_block(b, chain_ids)
        for pos in xrange(len(b['transactions'])):
            tx = b['transactions'][pos]
            if 'hash' not in tx:
                tx['hash'] = util.double_sha256(tx['tx'])
            tx_id = self.tx_find_id_and_value(tx)
            if tx_id:
                self.update_tx_cache(tx_id)
            else:
                print "error: import_block: no tx_id"
        return block_id

    def update_tx_cache(self, txid):
        # drop the cached history of every address touched by this
        # transaction, and queue those addresses for client notification
        inrows = self.get_tx_inputs(txid, False)
        for row in inrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print "WARNING: missing tx_in for tx", txid
                continue
            address = hash_to_address(chr(addrtype), _hash)
            if address in self.tx_cache:
                print "cache: invalidating", address
                self.tx_cache.pop(address)
            self.address_queue.put(address)

        outrows = self.get_tx_outputs(txid, False)
        for row in outrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print "WARNING: missing tx_out for tx", txid
                continue
            address = hash_to_address(chr(addrtype), _hash)
            if address in self.tx_cache:
                print "cache: invalidating", address
                self.tx_cache.pop(address)
            self.address_queue.put(address)

    def safe_sql(self, sql, params=(), lock=True):
        error = False
        try:
            if lock: self.dblock.acquire()
            ret = self.selectall(sql, params)
        except:
            error = True
            traceback.print_exc(file=sys.stdout)
        finally:
            if lock: self.dblock.release()
        if error: raise BaseException('sql error')
        return ret

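    # Typical call, mirroring the address queries below; the '?' placeholders
    # and the params tuple are passed through to Abe's selectall():
    #
    #   rows = store.safe_sql("SELECT tx_id FROM tx WHERE tx_hash = ?",
    #                         (dbhash,))
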
    def get_tx_outputs(self, tx_id, lock=True):
        # columns: pos, scriptPubKey, value, spending tx hash/id, txin pos,
        # pubkey hash; the spending columns are NULL while the output is unspent
        return self.safe_sql("""SELECT
                txout.txout_pos,
                txout.txout_scriptPubKey,
                txout.txout_value,
                nexttx.tx_hash,
                nexttx.tx_id,
                txin.txin_pos,
                pubkey.pubkey_hash
              FROM txout
              LEFT JOIN txin ON (txin.txout_id = txout.txout_id)
              LEFT JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
              LEFT JOIN tx nexttx ON (txin.tx_id = nexttx.tx_id)
             WHERE txout.tx_id = %d
             ORDER BY txout.txout_pos
             """ % (tx_id,), (), lock)

    def get_tx_inputs(self, tx_id, lock=True):
        # columns: pos, scriptSig, value, previous tx hash, previous tx id,
        # previous txout pos, pubkey hash
        return self.safe_sql(""" SELECT
                txin.txin_pos,
                txin.txin_scriptSig,
                txout.txout_value,
                COALESCE(prevtx.tx_hash, u.txout_tx_hash),
                prevtx.tx_id,
                COALESCE(txout.txout_pos, u.txout_pos),
                pubkey.pubkey_hash
              FROM txin
              LEFT JOIN txout ON (txout.txout_id = txin.txout_id)
              LEFT JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
              LEFT JOIN tx prevtx ON (txout.tx_id = prevtx.tx_id)
              LEFT JOIN unlinked_txin u ON (u.txin_id = txin.txin_id)
             WHERE txin.tx_id = %d
             ORDER BY txin.txin_pos
             """ % (tx_id,), (), lock)

    def get_address_out_rows(self, dbhash):
        # confirmed spends from this address; 9-tuples of (nTime, chain_id,
        # height, is_in=1, block_hash, tx_hash, tx_id, txin_pos, -value)
        out = self.safe_sql(""" SELECT
                b.block_nTime,
                cc.chain_id,
                b.block_height,
                1,
                b.block_hash,
                tx.tx_hash,
                tx.tx_id,
                txin.txin_pos,
                -prevout.txout_value
              FROM chain_candidate cc
              JOIN block b ON (b.block_id = cc.block_id)
              JOIN block_tx ON (block_tx.block_id = b.block_id)
              JOIN tx ON (tx.tx_id = block_tx.tx_id)
              JOIN txin ON (txin.tx_id = tx.tx_id)
              JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
              JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
               AND cc.in_longest = 1
             LIMIT ? """, (dbhash, self.sql_limit))

        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_out_rows_memorypool(self, dbhash):
        # unconfirmed spends; 5-tuples of (1, tx_hash, tx_id, txin_pos, -value)
        out = self.safe_sql(""" SELECT
                1,
                tx.tx_hash,
                tx.tx_id,
                txin.txin_pos,
                -prevout.txout_value
              FROM tx
              JOIN txin ON (txin.tx_id = tx.tx_id)
              JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
              JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
             LIMIT ? """, (dbhash, self.sql_limit))

        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_in_rows(self, dbhash):
        # confirmed outputs paying this address; same 9-tuple shape as
        # get_address_out_rows, but with is_in=0 and a positive value
        out = self.safe_sql(""" SELECT
                b.block_nTime,
                cc.chain_id,
                b.block_height,
                0,
                b.block_hash,
                tx.tx_hash,
                tx.tx_id,
                txout.txout_pos,
                txout.txout_value
              FROM chain_candidate cc
              JOIN block b ON (b.block_id = cc.block_id)
              JOIN block_tx ON (block_tx.block_id = b.block_id)
              JOIN tx ON (tx.tx_id = block_tx.tx_id)
              JOIN txout ON (txout.tx_id = tx.tx_id)
              JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
               AND cc.in_longest = 1
             LIMIT ? """, (dbhash, self.sql_limit))

        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_in_rows_memorypool(self, dbhash):
        # unconfirmed outputs; 5-tuples of (0, tx_hash, tx_id, txout_pos, value)
        out = self.safe_sql(""" SELECT
                0,
                tx.tx_hash,
                tx.tx_id,
                txout.txout_pos,
                txout.txout_value
              FROM tx
              JOIN txout ON (txout.tx_id = tx.tx_id)
              JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
             LIMIT ? """, (dbhash, self.sql_limit))
        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_history(self, addr):
        # serve from the per-address cache when possible
        cached_version = self.tx_cache.get(addr)
        if cached_version is not None:
            return cached_version

        version, binaddr = decode_check_address(addr)
        if binaddr is None:
            return None

        dbhash = self.binin(binaddr)
        rows = []
        rows += self.get_address_out_rows(dbhash)
        rows += self.get_address_in_rows(dbhash)

        txpoints = []
        known_tx = []

        for row in rows:
            try:
                nTime, chain_id, height, is_in, blk_hash, tx_hash, tx_id, pos, value = row
            except:
                print "cannot unpack row", row
                break
            tx_hash = self.hashout_hex(tx_hash)
            txpoint = { "timestamp":int(nTime), "height":int(height),
                        "is_input":int(is_in), "block_hash":self.hashout_hex(blk_hash),
                        "tx_hash":tx_hash, "tx_id":int(tx_id),
                        "index":int(pos), "value":int(value) }
            txpoints.append(txpoint)
            known_tx.append(tx_hash)

        # note: this sort is approximate; transactions in the same block
        # share a timestamp, so their relative order is unspecified
        txpoints = sorted(txpoints, key=operator.itemgetter("timestamp"))

        # read unconfirmed transactions from the memory pool
        rows = []
        rows += self.get_address_in_rows_memorypool(dbhash)
        rows += self.get_address_out_rows_memorypool(dbhash)
        address_has_mempool = False

        for row in rows:
            is_in, tx_hash, tx_id, pos, value = row
            tx_hash = self.hashout_hex(tx_hash)
            # the mempool queries also return confirmed rows; skip known ones
            if tx_hash in known_tx:
                continue
            # discard transactions that are too old
            if self.last_tx_id - tx_id > 50000:
                print "discarding tx id", tx_id
                continue
            # pending transactions were added to the db, even if they are
            # not returned by getmemorypool
            address_has_mempool = True
            txpoint = { "timestamp":0, "height":0, "is_input":int(is_in),
                        "block_hash":'mempool', "tx_hash":tx_hash,
                        "tx_id":int(tx_id), "index":int(pos), "value":int(value) }
            txpoints.append(txpoint)

        for txpoint in txpoints:
            tx_id = txpoint['tx_id']

            txinputs = []
            inrows = self.get_tx_inputs(tx_id)
            for row in inrows:
                _hash = self.binout(row[6])
                if not _hash:
                    #print "WARNING: missing tx_in for tx", tx_id, addr
                    continue
                address = hash_to_address(chr(addrtype), _hash)
                txinputs.append(address)
            txpoint['inputs'] = txinputs

            txoutputs = []
            outrows = self.get_tx_outputs(tx_id)
            for row in outrows:
                _hash = self.binout(row[6])
                if not _hash:
                    #print "WARNING: missing tx_out for tx", tx_id, addr
                    continue
                address = hash_to_address(chr(addrtype), _hash)
                txoutputs.append(address)
            txpoint['outputs'] = txoutputs

            # for all unspent outputs, add their scriptPubKey (it could also
            # be deduced from the address)
            if not txpoint['is_input']:
                # find this address's output row; row[4] is the spending
                # tx id, NULL while the output is unredeemed
                row = None
                for row in outrows:
                    if row[6] == dbhash:
                        break
                # if not redeemed, add the script
                if row and not row[4]:
                    txpoint['raw_output_script'] = row[1]

        # cache the result, but not mempool results, because their statuses
        # are ambiguous
        if not address_has_mempool:
            self.tx_cache[addr] = txpoints

        return txpoints

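    # A history entry produced above looks like (values illustrative):
    #
    #   { "timestamp": 1325376000, "height": 160000, "is_input": 0,
    #     "block_hash": "00000000000003a2...", "tx_hash": "9f2c0a...",
    #     "tx_id": 1234, "index": 0, "value": 50000000,
    #     "inputs": [...], "outputs": [...],
    #     "raw_output_script": ... }   # present only on unredeemed outputs
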
    def get_status(self, addr):
        # an address's status is the hash of the last block that affected it
        tx_points = self.get_history(addr)
        if not tx_points:
            return None
        lastpoint = tx_points[-1]
        status = lastpoint['block_hash']
        # this is a temporary hack; move it up once old clients have disappeared
        if status == 'mempool':  # and session['version'] != "old":
            status = status + ':%d' % len(tx_points)
        return status

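    # Example statuses (illustrative): the hex hash of the last block that
    # touched the address, 'mempool:3' when the newest entry is unconfirmed,
    # or None for an address with no history.
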
    def memorypool_update(store):
        # poll bitcoind's memory pool and import unconfirmed transactions
        ds = BCDataStream.BCDataStream()
        postdata = dumps({"method": 'getmemorypool', 'params': [], 'id': 'jsonrpc'})

        respdata = urllib.urlopen(store.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] != None:
            return

        v = r['result'].get('transactions')
        for hextx in v:
            ds.clear()
            ds.write(hextx.decode('hex'))
            tx = deserialize.parse_Transaction(ds)
            tx['hash'] = util.double_sha256(tx['tx'])
            tx_hash = store.hashin(tx['hash'])
            if not store.tx_find_id_and_value(tx):
                tx_id = store.import_tx(tx, False)
                store.update_tx_cache(tx_id)

        store.commit()

    def send_tx(self, tx):
        postdata = dumps({"method": 'importtransaction', 'params': [tx], 'id': 'jsonrpc'})
        respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] != None:
            msg = r['error'].get('message')
            out = "error: transaction rejected by memorypool: " + msg + "\n" + tx
        else:
            out = r['result']
        return out

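    # main_iteration() below needs get_block_number(); a minimal sketch,
    # assuming Abe's chain_summary view (chain_id, in_longest, block_height):
    def get_block_number(self, chain_id):
        (block_number,) = self.safe_sql("""
            SELECT block_height FROM chain_summary
             WHERE chain_id = %d AND in_longest = 1
             ORDER BY block_height DESC LIMIT 1""" % chain_id)[0]
        return block_number
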
    def main_iteration(store):
        with store.dblock:
            store.catch_up()
            store.memorypool_update()
            block_number = store.get_block_number(1)
        return block_number

    def catch_up(store):
        # if there is an exception, roll back and then re-raise it
        for dircfg in store.datadirs:
            try:
                store.catch_up_dir(dircfg)
            except Exception, e:
                store.log.exception("Failed to catch up %s", dircfg)
                store.rollback()
                raise

from processor import Processor


class BlockchainProcessor(Processor):

    def __init__(self, config):
        Processor.__init__(self)
        self.store = AbeStore(config)
        self.block_number = -1
        self.watched_addresses = []

        # catch up with the blockchain before serving requests
        n = self.store.main_iteration()
        print "blockchain: %d blocks" % n

        threading.Timer(10, self.run_store_iteration).start()

    def process(self, request):
        #print "abe process", request
        message_id = request['id']
        method = request['method']
        params = request.get('params', [])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.block_number

        elif method == 'blockchain.address.subscribe':
            try:
                address = params[0]
                result = self.store.get_status(address)
                self.watch_address(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print "error:", error

        elif method == 'blockchain.address.get_history':
            try:
                address = params[0]
                result = self.store.get_history(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print "error:", error

        elif method == 'blockchain.transaction.broadcast':
            txo = self.store.send_tx(params[0])
            print "sent tx:", txo
            result = txo

        else:
            error = "unknown method:%s" % method

        if error:
            self.push_response({ 'id':message_id, 'error':error })
        else:
            self.push_response({ 'id':message_id, 'result':result })

    def watch_address(self, addr):
        if addr not in self.watched_addresses:
            self.watched_addresses.append(addr)

    def run_store_iteration(self):
        try:
            block_number = self.store.main_iteration()
        except:
            traceback.print_exc(file=sys.stdout)
            print "terminating"
            self.shared.stop()

        if self.shared.stopped():
            print "exit timer"
            return

        if self.block_number != block_number:
            self.block_number = block_number
            print "block number:", self.block_number
            self.push_response({ 'method':'blockchain.numblocks.subscribe', 'params':[self.block_number] })

        # notify clients watching addresses whose cached history was invalidated
        while True:
            try:
                addr = self.store.address_queue.get(False)
            except Empty:
                break
            if addr in self.watched_addresses:
                status = self.store.get_status(addr)
                self.push_response({ 'method':'blockchain.address.subscribe', 'params':[addr, status] })

        threading.Timer(10, self.run_store_iteration).start()
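

# Example of driving the store directly (a sketch, not part of the server
# wiring): assumes a ConfigParser-style file with the [database] and
# [bitcoind] sections shown near the top; the path is hypothetical.
if __name__ == '__main__':
    import ConfigParser
    config = ConfigParser.ConfigParser()
    config.read('electrum.conf')  # hypothetical config path
    store = AbeStore(config)
    print "blockchain height:", store.main_iteration()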