X-Git-Url: https://git.novaco.in/?a=blobdiff_plain;f=backends%2Fabe%2F__init__.py;h=8c4454ee7a8fe46c33a44c91ee8949bb025c2af6;hb=189fded888582341014f2e34ed6c173926ad5417;hp=f5aa906abfd0f27bb7f74ecb50efa2ed5d794e50;hpb=2d0801ada549bcbaacc92a8332e82c2f0602f3cf;p=electrum-server.git

diff --git a/backends/abe/__init__.py b/backends/abe/__init__.py
index f5aa906..8c4454e 100644
--- a/backends/abe/__init__.py
+++ b/backends/abe/__init__.py
@@ -10,8 +10,6 @@
 from Queue import Queue
 import time, threading
 
-SQL_LIMIT=200
-
 class AbeStore(Datastore_class):
 
     def __init__(self, config):
@@ -25,14 +23,34 @@ class AbeStore(Datastore_class):
         elif args.dbtype == 'psycopg2':
             args.connect_args = { 'database' : config.get('database','database') }
 
+        coin = config.get('server', 'coin')
+        self.addrtype = 0
+        if coin == 'litecoin':
+            print 'Litecoin settings:'
+            datadir = config.get('server','datadir')
+            print ' datadir = ' + datadir
+            args.datadir = [{"dirname":datadir,"chain":"Litecoin","code3":"LTC","address_version":"\u0030"}]
+            print ' addrtype = 48'
+            self.addrtype = 48
+
         Datastore_class.__init__(self,args)
 
+        self.sql_limit = int( config.get('database','limit') )
+
         self.tx_cache = {}
         self.bitcoind_url = 'http://%s:%s@%s:%s/' % ( config.get('bitcoind','user'), config.get('bitcoind','password'), config.get('bitcoind','host'), config.get('bitcoind','port'))
 
         self.address_queue = Queue()
 
         self.dblock = thread.allocate_lock()
+        self.last_tx_id = 0
+
+
+    def import_tx(self, tx, is_coinbase):
+        tx_id = super(AbeStore, self).import_tx(tx, is_coinbase)
+        self.last_tx_id = tx_id
+        return tx_id
+
 
 
 
@@ -59,7 +77,7 @@ class AbeStore(Datastore_class):
                 #print "WARNING: missing tx_in for tx", txid
                 continue
 
-            address = hash_to_address(chr(0), _hash)
+            address = hash_to_address(chr(self.addrtype), _hash)
             if self.tx_cache.has_key(address):
                 print "cache: invalidating", address
                 self.tx_cache.pop(address)
@@ -72,7 +90,7 @@ class AbeStore(Datastore_class):
                 #print "WARNING: missing tx_out for tx", txid
                 continue
 
-            address = hash_to_address(chr(0), _hash)
+            address = hash_to_address(chr(self.addrtype), _hash)
             if self.tx_cache.has_key(address):
                 print "cache: invalidating", address
                 self.tx_cache.pop(address)
@@ -86,6 +104,7 @@ class AbeStore(Datastore_class):
             ret = self.selectall(sql,params)
         except:
             error = True
+            traceback.print_exc(file=sys.stdout)
         finally:
             if lock: self.dblock.release()
 
@@ -151,9 +170,9 @@ class AbeStore(Datastore_class):
              JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
               AND cc.in_longest = 1
-             LIMIT ? """, (dbhash,SQL_LIMIT))
+             LIMIT ? """, (dbhash,self.sql_limit))
 
-        if len(out)==SQL_LIMIT:
+        if len(out)==self.sql_limit:
             raise BaseException('limit reached')
         return out
 
@@ -169,9 +188,9 @@ class AbeStore(Datastore_class):
              JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
              JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
-             LIMIT ? """, (dbhash,SQL_LIMIT))
+             LIMIT ? """, (dbhash,self.sql_limit))
 
-        if len(out)==SQL_LIMIT:
+        if len(out)==self.sql_limit:
             raise BaseException('limit reached')
         return out
 
@@ -194,9 +213,9 @@ class AbeStore(Datastore_class):
              JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
               AND cc.in_longest = 1
-             LIMIT ? """, (dbhash,SQL_LIMIT))
+             LIMIT ? """, (dbhash,self.sql_limit))
 
-        if len(out)==SQL_LIMIT:
+        if len(out)==self.sql_limit:
             raise BaseException('limit reached')
         return out
 
@@ -211,9 +230,9 @@ class AbeStore(Datastore_class):
              JOIN txout ON (txout.tx_id = tx.tx_id)
              JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
             WHERE pubkey.pubkey_hash = ?
-             LIMIT ? """, (dbhash,SQL_LIMIT))
+             LIMIT ? """, (dbhash,self.sql_limit))
 
-        if len(out)==SQL_LIMIT:
+        if len(out)==self.sql_limit:
             raise BaseException('limit reached')
         return out
 
@@ -266,21 +285,19 @@ class AbeStore(Datastore_class):
         rows += self.get_address_out_rows_memorypool( dbhash )
         address_has_mempool = False
 
-        current_id = self.new_id("tx")
-
         for row in rows:
             is_in, tx_hash, tx_id, pos, value = row
             tx_hash = self.hashout_hex(tx_hash)
             if tx_hash in known_tx:
                 continue
 
-            # this means that pending transactions were added to the db, even if they are not returned by getmemorypool
-            address_has_mempool = True
-
-            # fixme: we need to detect transactions that became invalid
-            if current_id - tx_id > 10000:
+            # discard transactions that are too old
+            if self.last_tx_id - tx_id > 50000:
+                print "discarding tx id", tx_id
                 continue
+            # this means that pending transactions were added to the db, even if they are not returned by getmemorypool
+            address_has_mempool = True
 
 
             #print "mempool", tx_hash
             txpoint = {
@@ -306,7 +323,7 @@ class AbeStore(Datastore_class):
                 if not _hash:
                     #print "WARNING: missing tx_in for tx", tx_id, addr
                     continue
-                address = hash_to_address(chr(0), _hash)
+                address = hash_to_address(chr(self.addrtype), _hash)
                 txinputs.append(address)
             txpoint['inputs'] = txinputs
             txoutputs = []
@@ -316,7 +333,7 @@ class AbeStore(Datastore_class):
                 if not _hash:
                     #print "WARNING: missing tx_out for tx", tx_id, addr
                     continue
-                address = hash_to_address(chr(0), _hash)
+                address = hash_to_address(chr(self.addrtype), _hash)
                 txoutputs.append(address)
             txpoint['outputs'] = txoutputs
 
@@ -333,6 +350,8 @@ class AbeStore(Datastore_class):
             if row:
                 if not row[4]:
                     txpoint['raw_output_script'] = row[1]
+            txpoint.pop('tx_id')
+
         # cache result
         # do not cache mempool results because statuses are ambiguous
         if not address_has_mempool:
@@ -379,6 +398,7 @@ class AbeStore(Datastore_class):
             else:
                 tx_id = store.import_tx(tx, False)
                 store.update_tx_cache(tx_id)
+                #print tx_hash
 
         store.commit()
 
@@ -396,22 +416,13 @@ class AbeStore(Datastore_class):
 
 
     def main_iteration(store):
-        try:
-            store.dblock.acquire()
+        with store.dblock:
             store.catch_up()
             store.memorypool_update()
             block_number = store.get_block_number(1)
+            return block_number
 
-        except IOError:
-            print "IOError: cannot reach bitcoind"
-            block_number = 0
-        except:
-            traceback.print_exc(file=sys.stdout)
-            block_number = 0
-        finally:
-            store.dblock.release()
 
-        return block_number
 
 
     def catch_up(store):
@@ -436,6 +447,11 @@ class BlockchainProcessor(Processor):
         self.store = AbeStore(config)
         self.block_number = -1
         self.watched_addresses = []
+
+        # catch_up first
+        n = self.store.main_iteration()
+        print "blockchain: %d blocks"%n
+
         threading.Timer(10, self.run_store_iteration).start()
 
     def process(self, request):
@@ -456,14 +472,16 @@ class BlockchainProcessor(Processor):
                 result = self.store.get_status(address)
                 self.watch_address(address)
             except BaseException, e:
-                error = str(e)
+                error = str(e) + ': ' + address
+                print "error:", error
 
         elif method == 'blockchain.address.get_history':
             try:
                 address = params[0]
                 result = self.store.get_history( address )
             except BaseException, e:
-                error = str(e)
+                error = str(e) + ': ' + address
+                print "error:", error
 
         elif method == 'blockchain.transaction.broadcast':
             txo = self.store.send_tx(params[0])
@@ -488,11 +506,18 @@ class BlockchainProcessor(Processor):
 
 
     def run_store_iteration(self):
+
+        try:
+            block_number = self.store.main_iteration()
+        except:
+            traceback.print_exc(file=sys.stdout)
+            print "terminating"
+            self.shared.stop()
+
         if self.shared.stopped():
             print "exit timer"
             return
-
-        block_number = self.store.main_iteration()
+
         if self.block_number != block_number:
             self.block_number = block_number
             print "block number:", self.block_number
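
Note on the configuration keys this patch introduces: the hard-coded SQL_LIMIT=200 constant and the fixed Bitcoin address-version byte are replaced by values read from the server configuration (database/limit, server/coin, server/datadir). Below is a minimal sketch of how those options are read, assuming the same ConfigParser-style access the patch uses; the file name electrum.conf, the standalone script form, and the sample output are illustrative assumptions, not part of the patch.

    # sketch.py -- illustrates the config options read by the patched AbeStore.__init__
    # (hypothetical standalone example; electrum-server builds its config object elsewhere)
    import ConfigParser

    config = ConfigParser.ConfigParser()
    config.read('electrum.conf')                        # hypothetical config file path

    sql_limit = int(config.get('database', 'limit'))    # replaces the removed SQL_LIMIT=200
    coin = config.get('server', 'coin')                 # 'litecoin' switches the address type
    addrtype = 48 if coin == 'litecoin' else 0          # mirrors the patch's self.addrtype logic
    if coin == 'litecoin':
        datadir = config.get('server', 'datadir')       # forwarded to Abe via args.datadir

    print "limit:", sql_limit, "coin:", coin, "addrtype:", addrtype

Since ConfigParser.get raises NoSectionError/NoOptionError for missing keys (unless defaults are supplied to the parser), the database limit and server coin options must be present for the patched __init__ to run; datadir is only consulted when coin is litecoin.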