from Abe.util import hash_to_address, decode_check_address
from Abe.DataStore import DataStore as Datastore_class
from Abe import DataStore, readconf, BCDataStream, deserialize, util, base58

import thread, traceback, sys, urllib, operator
from json import dumps, loads
from Queue import Queue
import time, threading
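
# AbeStore expects a ConfigParser-style object with [database], [server] and
# [bitcoind] sections.  The option names below are the ones read via
# config.get() in __init__; the values are purely illustrative (a hypothetical
# sqlite3 setup), and 'datadir' is only consulted when coin = litecoin:
#
#   [database]
#   type = sqlite3
#   database = /path/to/abe.sqlite
#   username =
#   password =
#   limit = 100
#
#   [server]
#   coin = bitcoin
#   datadir = /path/to/.bitcoin
#
#   [bitcoind]
#   user = rpcuser
#   password = rpcpassword
#   host = localhost
#   port = 8332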


class AbeStore(Datastore_class):

    def __init__(self, config):
        conf = DataStore.CONFIG_DEFAULTS
        args, argv = readconf.parse_argv( [], conf)
        args.dbtype = config.get('database','type')
        if args.dbtype == 'sqlite3':
            args.connect_args = { 'database' : config.get('database','database') }
        elif args.dbtype == 'MySQLdb':
            args.connect_args = { 'db' : config.get('database','database'), 'user' : config.get('database','username'), 'passwd' : config.get('database','password') }
        elif args.dbtype == 'psycopg2':
            args.connect_args = { 'database' : config.get('database','database') }

        coin = config.get('server', 'coin')
        self.addrtype = 0  # Bitcoin pay-to-pubkey-hash version byte
        if coin == 'litecoin':
            print 'Litecoin settings:'
            datadir = config.get('server','datadir')
            print ' datadir = ' + datadir
            # 0x30 == 48 is the Litecoin pay-to-pubkey-hash version byte
            args.datadir = [{"dirname":datadir,"chain":"Litecoin","code3":"LTC","address_version":"\x30"}]
            print ' addrtype = 48'
            self.addrtype = 48

        Datastore_class.__init__(self,args)

        self.sql_limit = int( config.get('database','limit') )

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % ( config.get('bitcoind','user'), config.get('bitcoind','password'), config.get('bitcoind','host'), config.get('bitcoind','port'))

        # per-address history cache; entries are invalidated by update_tx_cache()
        self.tx_cache = {}
        self.last_tx_id = 0

        self.address_queue = Queue()

        self.dblock = thread.allocate_lock()

    def import_tx(self, tx, is_coinbase):
        tx_id = super(AbeStore, self).import_tx(tx, is_coinbase)
        self.last_tx_id = tx_id
        return tx_id

    def import_block(self, b, chain_ids=frozenset()):
        block_id = super(AbeStore, self).import_block(b, chain_ids)
        for pos in xrange(len(b['transactions'])):
            tx = b['transactions'][pos]
            if 'hash' not in tx:
                tx['hash'] = util.double_sha256(tx['tx'])
            tx_id = self.tx_find_id_and_value(tx)
            if tx_id:
                self.update_tx_cache(tx_id)
            else:
                print "error: import_block: no tx_id"
        return block_id

    def update_tx_cache(self, txid):
        inrows = self.get_tx_inputs(txid, False)
        for row in inrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print "WARNING: missing tx_in for tx", txid
                continue
            address = hash_to_address(chr(self.addrtype), _hash)
            if self.tx_cache.has_key(address):
                print "cache: invalidating", address
                self.tx_cache.pop(address)
            self.address_queue.put(address)

        outrows = self.get_tx_outputs(txid, False)
        for row in outrows:
            _hash = self.binout(row[6])
            if not _hash:
                #print "WARNING: missing tx_out for tx", txid
                continue
            address = hash_to_address(chr(self.addrtype), _hash)
            if self.tx_cache.has_key(address):
                print "cache: invalidating", address
                self.tx_cache.pop(address)
            self.address_queue.put(address)

    def safe_sql(self, sql, params=(), lock=True):
        try:
            if lock: self.dblock.acquire()
            return self.selectall(sql, params)
        except:
            traceback.print_exc(file=sys.stdout)
            raise BaseException('sql error')
        finally:
            if lock: self.dblock.release()

    def get_tx_outputs(self, tx_id, lock=True):
        return self.safe_sql("""SELECT
                txout.txout_pos,
                txout.txout_scriptPubKey,
                txout.txout_value,
                nexttx.tx_hash,
                nexttx.tx_id,
                txin.txin_pos,
                pubkey.pubkey_hash
              FROM txout
              LEFT JOIN txin ON (txin.txout_id = txout.txout_id)
              LEFT JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
              LEFT JOIN tx nexttx ON (txin.tx_id = nexttx.tx_id)
             WHERE txout.tx_id = %d
             ORDER BY txout.txout_pos
             """%(tx_id), (), lock)

    def get_tx_inputs(self, tx_id, lock=True):
        return self.safe_sql(""" SELECT
                txin.txin_pos,
                txin.txin_scriptSig,
                txout.txout_value,
                COALESCE(prevtx.tx_hash, u.txout_tx_hash),
                prevtx.tx_id,
                COALESCE(txout.txout_pos, u.txout_pos),
                pubkey.pubkey_hash
              FROM txin
              LEFT JOIN txout ON (txout.txout_id = txin.txout_id)
              LEFT JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
              LEFT JOIN tx prevtx ON (txout.tx_id = prevtx.tx_id)
              LEFT JOIN unlinked_txin u ON (u.txin_id = txin.txin_id)
             WHERE txin.tx_id = %d
             ORDER BY txin.txin_pos
             """%(tx_id,), (), lock)

    def get_address_out_rows(self, dbhash):
        out = self.safe_sql(""" SELECT
                b.block_nTime,
                cc.chain_id,
                b.block_height,
                1,
                b.block_hash,
                tx.tx_hash,
                tx.tx_id,
                txin.txin_pos,
                prevout.txout_value
              FROM chain_candidate cc
              JOIN block b ON (b.block_id = cc.block_id)
              JOIN block_tx ON (block_tx.block_id = b.block_id)
              JOIN tx ON (tx.tx_id = block_tx.tx_id)
              JOIN txin ON (txin.tx_id = tx.tx_id)
              JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
              JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
               AND cc.in_longest = 1
             LIMIT ? """, (dbhash, self.sql_limit))

        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_out_rows_memorypool(self, dbhash):
        out = self.safe_sql(""" SELECT
                1,
                tx.tx_hash,
                tx.tx_id,
                txin.txin_pos,
                prevout.txout_value
              FROM tx
              JOIN txin ON (txin.tx_id = tx.tx_id)
              JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
              JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
             LIMIT ? """, (dbhash, self.sql_limit))

        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_in_rows(self, dbhash):
        out = self.safe_sql(""" SELECT
                b.block_nTime,
                cc.chain_id,
                b.block_height,
                0,
                b.block_hash,
                tx.tx_hash,
                tx.tx_id,
                txout.txout_pos,
                txout.txout_value
              FROM chain_candidate cc
              JOIN block b ON (b.block_id = cc.block_id)
              JOIN block_tx ON (block_tx.block_id = b.block_id)
              JOIN tx ON (tx.tx_id = block_tx.tx_id)
              JOIN txout ON (txout.tx_id = tx.tx_id)
              JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
               AND cc.in_longest = 1
             LIMIT ? """, (dbhash, self.sql_limit))

        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out

    def get_address_in_rows_memorypool(self, dbhash):
        out = self.safe_sql( """ SELECT
                0,
                tx.tx_hash,
                tx.tx_id,
                txout.txout_pos,
                txout.txout_value
              FROM tx
              JOIN txout ON (txout.tx_id = tx.tx_id)
              JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
             LIMIT ? """, (dbhash, self.sql_limit))
        if len(out) == self.sql_limit:
            raise BaseException('limit reached')
        return out
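
    # The four queries above feed get_history().  Confirmed-history rows are
    # unpacked there as (nTime, chain_id, height, is_in, blk_hash, tx_hash,
    # tx_id, pos, value) and memory-pool rows as (is_in, tx_hash, tx_id, pos,
    # value); the "out" queries mark spends with is_in = 1, the "in" queries
    # mark receipts with is_in = 0.  The exact column lists are reconstructed
    # from that unpacking and from the Abe schema, so treat them as a sketch.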

    def get_history(self, addr):
        cached_version = self.tx_cache.get( addr )
        if cached_version is not None:
            return cached_version

        version, binaddr = decode_check_address(addr)
        if binaddr is None:
            return None

        dbhash = self.binin(binaddr)
        rows = []
        rows += self.get_address_out_rows( dbhash )
        rows += self.get_address_in_rows( dbhash )

        txpoints = []
        known_tx = []

        for row in rows:
            try:
                nTime, chain_id, height, is_in, blk_hash, tx_hash, tx_id, pos, value = row
            except:
                print "cannot unpack row", row
                break
            tx_hash = self.hashout_hex(tx_hash)
            txpoint = {
                    "timestamp": int(nTime),
                    "height": int(height),
                    "is_input": int(is_in),
                    "block_hash": self.hashout_hex(blk_hash),
                    "tx_hash": tx_hash,
                    "tx_id": int(tx_id),
                    "index": int(pos),
                    "value": int(value),
                    }
            txpoints.append(txpoint)
            known_tx.append(tx_hash)

        # todo: sort them really...
        txpoints = sorted(txpoints, key=operator.itemgetter("timestamp"))

        # read the memory pool
        rows = []
        rows += self.get_address_in_rows_memorypool( dbhash )
        rows += self.get_address_out_rows_memorypool( dbhash )
        address_has_mempool = False

        for row in rows:
            is_in, tx_hash, tx_id, pos, value = row
            tx_hash = self.hashout_hex(tx_hash)
            if tx_hash in known_tx:
                continue

            # discard transactions that are too old
            if self.last_tx_id - tx_id > 50000:
                print "discarding tx id", tx_id
                continue

            # this means that pending transactions were added to the db, even if they are not returned by getmemorypool
            address_has_mempool = True

            #print "mempool", tx_hash
            txpoint = {
                    "timestamp": 0,
                    "height": 0,
                    "is_input": int(is_in),
                    "block_hash": 'mempool',
                    "tx_hash": tx_hash,
                    "tx_id": int(tx_id),
                    "index": int(pos),
                    "value": int(value),
                    }
            txpoints.append(txpoint)

        for txpoint in txpoints:
            tx_id = txpoint['tx_id']

            txinputs = []
            inrows = self.get_tx_inputs(tx_id)
            for row in inrows:
                _hash = self.binout(row[6])
                if not _hash:
                    #print "WARNING: missing tx_in for tx", tx_id, addr
                    continue
                address = hash_to_address(chr(self.addrtype), _hash)
                txinputs.append(address)
            txpoint['inputs'] = txinputs

            txoutputs = []
            outrows = self.get_tx_outputs(tx_id)
            for row in outrows:
                _hash = self.binout(row[6])
                if not _hash:
                    #print "WARNING: missing tx_out for tx", tx_id, addr
                    continue
                address = hash_to_address(chr(self.addrtype), _hash)
                txoutputs.append(address)
            txpoint['outputs'] = txoutputs

            # for all unspent inputs, I want their scriptpubkey. (actually I could deduce it from the address)
            if not txpoint['is_input']:
                # detect if already redeemed...
                for row in outrows:
                    if row[6] == dbhash: break
                else:
                    row = None
                #row = self.get_tx_output(tx_id,dbhash)
                # pos, script, value, o_hash, o_id, o_pos, binaddr = row
                # if not redeemed, we add the script
                if row:
                    if not row[4]: txpoint['raw_output_script'] = row[1]

        # do not cache mempool results because statuses are ambiguous
        if not address_has_mempool:
            self.tx_cache[addr] = txpoints
        return txpoints
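
    # Each entry in the list returned by get_history() is a dict shaped like
    # the txpoints built above; the values below are purely illustrative:
    #
    #   { "timestamp": 1325454223, "height": 160143, "is_input": 0,
    #     "block_hash": "0000...5e11", "tx_hash": "a9d5...77c2",
    #     "tx_id": 1234567, "index": 0, "value": 50000000,
    #     "inputs": ["1A1zP1..."], "outputs": ["1BitcoinEater..."],
    #     "raw_output_script": "76a914...88ac" }
    #
    # Unconfirmed entries carry block_hash 'mempool'.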

    def get_status(self, addr):
        # get address status, i.e. the last block for that address.
        tx_points = self.get_history(addr)
        if not tx_points:
            return None

        lastpoint = tx_points[-1]
        status = lastpoint['block_hash']
        # this is a temporary hack; move it up once old clients have disappeared
        if status == 'mempool': # and session['version'] != "old":
            status = status + ':%d'% len(tx_points)
        return status

    def memorypool_update(store):
        ds = BCDataStream.BCDataStream()
        postdata = dumps({"method": 'getmemorypool', 'params': [], 'id':'jsonrpc'})

        respdata = urllib.urlopen(store.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] != None:
            return

        v = r['result'].get('transactions')
        for hextx in v:
            ds.clear()
            ds.write(hextx.decode('hex'))
            tx = deserialize.parse_Transaction(ds)
            tx['hash'] = util.double_sha256(tx['tx'])
            tx_hash = store.hashin(tx['hash'])

            if store.tx_find_id_and_value(tx):
                pass
            else:
                tx_id = store.import_tx(tx, False)
                store.update_tx_cache(tx_id)

        store.commit()

    def send_tx(self, tx):
        postdata = dumps({"method": 'importtransaction', 'params': [tx], 'id':'jsonrpc'})
        respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] != None:
            msg = r['error'].get('message')
            out = "error: transaction rejected by memorypool: " + msg + "\n" + tx
        else:
            out = r['result']
        return out

    def main_iteration(store):
        store.catch_up()
        store.memorypool_update()
        block_number = store.get_block_number(1)
        return block_number

    def catch_up(store):
        # if there is an exception, do rollback and then re-raise the exception
        for dircfg in store.datadirs:
            try:
                store.catch_up_dir(dircfg)
            except Exception, e:
                store.log.exception("Failed to catch up %s", dircfg)
                store.rollback()
                raise e


from processor import Processor


class BlockchainProcessor(Processor):

    def __init__(self, config):
        Processor.__init__(self)
        self.store = AbeStore(config)
        self.block_number = -1
        self.watched_addresses = []

        # do an initial sync before the periodic timer kicks in
        n = self.store.main_iteration()
        print "blockchain: %d blocks"%n

        threading.Timer(10, self.run_store_iteration).start()

    def process(self, request):
        #print "abe process", request

        message_id = request['id']
        method = request['method']
        params = request.get('params',[])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.block_number

        elif method == 'blockchain.address.subscribe':
            try:
                address = params[0]
                result = self.store.get_status(address)
                self.watch_address(address)
            except BaseException, e:
                error = str(e) + ': ' + address
                print "error:", error

        elif method == 'blockchain.address.get_history':
            try:
                address = params[0]
                result = self.store.get_history( address )
            except BaseException, e:
                error = str(e) + ': ' + address
                print "error:", error

        elif method == 'blockchain.transaction.broadcast':
            txo = self.store.send_tx(params[0])
            print "sent tx:", txo
            result = txo

        else:
            error = "unknown method:%s"%method

        if error:
            response = { 'id':message_id, 'error':error }
            self.push_response(response)
        else:
            response = { 'id':message_id, 'result':result }
            self.push_response(response)

    def watch_address(self, addr):
        if addr not in self.watched_addresses:
            self.watched_addresses.append(addr)

    def run_store_iteration(self):
        try:
            block_number = self.store.main_iteration()
        except:
            traceback.print_exc(file=sys.stdout)
            self.shared.stop()

        if self.shared.stopped():
            return

        if self.block_number != block_number:
            self.block_number = block_number
            print "block number:", self.block_number
            self.push_response({ 'method':'blockchain.numblocks.subscribe', 'params':[self.block_number] })

        while True:
            try:
                addr = self.store.address_queue.get(False)
            except:
                break
            if addr in self.watched_addresses:
                status = self.store.get_status( addr )
                self.push_response({ 'method':'blockchain.address.subscribe', 'params':[addr, status] })

        threading.Timer(10, self.run_store_iteration).start()
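

# A minimal usage sketch.  In practice this module is driven by the
# electrum-server dispatcher, which supplies the Processor base class and the
# shared stop-flag object; the ConfigParser layout below just mirrors the
# config.get() calls in AbeStore.__init__, and the file name is hypothetical:
#
#   import ConfigParser
#   config = ConfigParser.ConfigParser()
#   config.read("electrum.conf")
#   processor = BlockchainProcessor(config)   # syncs, then polls every 10 s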