import time, threading
-
class AbeStore(Datastore_class):
def __init__(self, config):
elif args.dbtype == 'psycopg2':
args.connect_args = { 'database' : config.get('database','database') }
+ coin = config.get('server', 'coin')
+ self.addrtype = 0
+ if coin == 'litecoin':
+ print 'Litecoin settings:'
+ datadir = config.get('server','datadir')
+ print ' datadir = ' + datadir
+ args.datadir = [{"dirname":datadir,"chain":"Litecoin","code3":"LTC","address_version":"\u0030"}]
+ print ' addrtype = 48'
+ self.addrtype = 48
+
Datastore_class.__init__(self,args)
+ self.chain_id = self.datadirs[0]["chain_id"];
+ print 'Coin chain_id = %d' % self.chain_id
+
self.sql_limit = int( config.get('database','limit') )
self.tx_cache = {}
self.address_queue = Queue()
self.dblock = thread.allocate_lock()
+ self.last_tx_id = 0
+
+
+ def import_tx(self, tx, is_coinbase):
+ # Wrap the base Datastore import to remember the id of the most
+ # recently imported transaction.  last_tx_id is read later (see the
+ # `self.last_tx_id - tx_id > 50000` check in the mempool code) to
+ # discard transactions that are too old.
+ # NOTE(review): assumes the superclass import_tx returns the new
+ # row id — confirm against the Abe Datastore implementation.
+ tx_id = super(AbeStore, self).import_tx(tx, is_coinbase)
+ self.last_tx_id = tx_id
+ return tx_id
+
#print "WARNING: missing tx_in for tx", txid
continue
- address = hash_to_address(chr(0), _hash)
+ address = hash_to_address(chr(self.addrtype), _hash)
if self.tx_cache.has_key(address):
print "cache: invalidating", address
self.tx_cache.pop(address)
#print "WARNING: missing tx_out for tx", txid
continue
- address = hash_to_address(chr(0), _hash)
+ address = hash_to_address(chr(self.addrtype), _hash)
if self.tx_cache.has_key(address):
print "cache: invalidating", address
self.tx_cache.pop(address)
JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
WHERE pubkey.pubkey_hash = ?
+ AND cc.chain_id = ?
AND cc.in_longest = 1
- LIMIT ? """, (dbhash,self.sql_limit))
+ LIMIT ? """, (dbhash, self.chain_id, self.sql_limit))
if len(out)==self.sql_limit:
raise BaseException('limit reached')
JOIN txout ON (txout.tx_id = tx.tx_id)
JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
WHERE pubkey.pubkey_hash = ?
+ AND cc.chain_id = ?
AND cc.in_longest = 1
- LIMIT ? """, (dbhash,self.sql_limit))
+ LIMIT ? """, (dbhash, self.chain_id, self.sql_limit))
if len(out)==self.sql_limit:
raise BaseException('limit reached')
rows += self.get_address_out_rows_memorypool( dbhash )
address_has_mempool = False
- current_id = self.safe_sql("""SELECT last_value FROM tx_seq""")
- current_id = current_id[0][0]
-
for row in rows:
is_in, tx_hash, tx_id, pos, value = row
tx_hash = self.hashout_hex(tx_hash)
if tx_hash in known_tx:
continue
- # this means that pending transactions were added to the db, even if they are not returned by getmemorypool
- address_has_mempool = True
-
- # fixme: we need to detect transactions that became invalid
- if current_id - tx_id > 10000:
+ # discard transactions that are too old
+ if self.last_tx_id - tx_id > 50000:
+ print "discarding tx id", tx_id
continue
+ # this means that pending transactions were added to the db, even if they are not returned by getmemorypool
+ address_has_mempool = True
#print "mempool", tx_hash
txpoint = {
if not _hash:
#print "WARNING: missing tx_in for tx", tx_id, addr
continue
- address = hash_to_address(chr(0), _hash)
+ address = hash_to_address(chr(self.addrtype), _hash)
txinputs.append(address)
txpoint['inputs'] = txinputs
txoutputs = []
if not _hash:
#print "WARNING: missing tx_out for tx", tx_id, addr
continue
- address = hash_to_address(chr(0), _hash)
+ address = hash_to_address(chr(self.addrtype), _hash)
txoutputs.append(address)
txpoint['outputs'] = txoutputs
with store.dblock:
store.catch_up()
store.memorypool_update()
- block_number = store.get_block_number(1)
+ block_number = store.get_block_number(store.chain_id)
return block_number
self.store = AbeStore(config)
self.block_number = -1
self.watched_addresses = []
+
+ # catch_up first
+ n = self.store.main_iteration()
+ print "blockchain: %d blocks"%n
+
threading.Timer(10, self.run_store_iteration).start()
def process(self, request):