config.set('server', 'port', 50000)
config.set('server', 'password', '')
config.set('server', 'irc', 'yes')
-config.set('server', 'cache', 'no')
config.set('server', 'ircname', 'Electrum server')
config.add_section('database')
config.set('database', 'type', 'psycopg2')
block_number = -1
old_block_number = -1
sessions = {}
-sessions_sub_numblocks = [] # sessions that have subscribed to the service
+sessions_sub_numblocks = {} # maps session_id to the JSON-RPC id of its numblocks subscription
dblock = thread.allocate_lock()
peer_list = {}
class MyStore(Datastore_class):
- def import_tx(self, tx, is_coinbase):
- tx_id = super(MyStore, self).import_tx(tx, is_coinbase)
- if config.get('server', 'cache') == 'yes': self.update_tx_cache(tx_id)
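+ # override import_block: refresh the address cache for every tx in a newly imported block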
+ def import_block(self, b, chain_ids=frozenset()):
+ block_id = super(MyStore, self).import_block(b, chain_ids)
+ #print "block", block_id
+ for pos in xrange(len(b['transactions'])):
+ tx = b['transactions'][pos]
+ if 'hash' not in tx:
+ tx['hash'] = util.double_sha256(tx['tx'])
+ tx_id = self.tx_find_id_and_value(tx)
+ if tx_id:
+ self.update_tx_cache(tx_id)
+ else:
+ print "error: import_block: no tx_id"
+ return block_id
+
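+ # refresh the cached history of every address touched by this tx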
def update_tx_cache(self, txid):
inrows = self.get_tx_inputs(txid, False)
def get_history(self, addr):
- if config.get('server','cache') == 'yes':
- cached_version = self.tx_cache.get( addr )
- if cached_version is not None:
- return cached_version
+ cached_version = self.tx_cache.get( addr )
+ if cached_version is not None:
+ return cached_version
version, binaddr = decode_check_address(addr)
if binaddr is None:
if not row[4]: txpoint['raw_scriptPubKey'] = row[1]
# cache result
- if config.get('server','cache') == 'yes' and not address_has_mempool:
+ if not address_has_mempool:
self.tx_cache[addr] = txpoints
return txpoints
addresses = session['addresses'].keys()
if addr in addresses:
- print "address ", addr, "found in session", session_id
status = get_address_status( addr )
- print "new_status:", status
- last_status = session['addresses'][addr]
- print "last_status", last_status
+ message_id, last_status = session['addresses'][addr]
if last_status != status:
- print "status is new", addr
- send_status(session_id,addr,status)
- sessions[session_id]['addresses'][addr] = status
+ #print "sending new status for %s:"%addr, status
+ send_status(session_id, message_id, addr, status)
+ sessions[session_id]['addresses'][addr] = (message_id, status)
def get_address_status(addr):
def send_numblocks(session_id):
- out = json.dumps( {'method':'numblocks.subscribe', 'result':block_number} )
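+ # reply with the JSON-RPC id recorded when the session subscribed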
+ message_id = sessions_sub_numblocks[session_id]
+ out = json.dumps( {'id':message_id, 'result':block_number} )
output_queue.put((session_id, out))
-def send_status(session_id, address, status):
- out = json.dumps( { 'method':'address.subscribe', 'address':address, 'status':status } )
+def send_status(session_id, message_id, address, status):
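+ # an address notification reuses the id of the original subscribe request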
+ out = json.dumps( { 'id':message_id, 'result':status } )
output_queue.put((session_id, out))
-def subscribe_to_numblocks(session_id):
- sessions_sub_numblocks.append(session_id)
+def subscribe_to_numblocks(session_id, message_id):
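+ # remember the request id so send_numblocks can reuse it for every new block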
+ sessions_sub_numblocks[session_id] = message_id
send_numblocks(session_id)
-def subscribe_to_address(session_id, address):
+def subscribe_to_address(session_id, message_id, address):
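+ # store (message_id, status) per address so later notifications carry the right id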
status = get_address_status(address)
sessions[session_id]['type'] = 'subscribe'
- sessions[session_id]['addresses'][address] = status
+ sessions[session_id]['addresses'][address] = (message_id, status)
sessions[session_id]['last_time'] = time.time()
- send_status(session_id, address, status)
+ send_status(session_id, message_id, address, status)
def new_session(version, addresses):
session_id = random_string(10)
conn.close()
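+# timestamp prefix shared by all log messages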
+def timestr():
+ return time.strftime("[%d/%m/%Y-%H:%M:%S]")
# used by the native handler
def do_command(cmd, data, ipaddr):
- timestr = time.strftime("[%d/%m/%Y-%H:%M:%S]")
-
if cmd=='b':
out = "%d"%block_number
except:
print "error", data
return None
- print timestr, "new session", ipaddr, addresses[0] if addresses else addresses, len(addresses), version
+ print timestr(), "new session", ipaddr, addresses[0] if addresses else addresses, len(addresses), version
out = new_session(version, addresses)
elif cmd=='update_session':
except:
print "error"
return None
- print timestr, "update session", ipaddr, addresses[0] if addresses else addresses, len(addresses)
+ print timestr(), "update session", ipaddr, addresses[0] if addresses else addresses, len(addresses)
out = update_session(session_id,addresses)
elif cmd == 'bccapi_login':
elif cmd =='tx':
out = send_tx(data)
- print timestr, "sent tx:", ipaddr, out
+ print timestr(), "sent tx:", ipaddr, out
elif cmd == 'stop':
out = cmd_stop(data)
traceback.print_exc(file=sys.stdout)
-def close_sesion(session_id):
+def close_session(session_id):
print "lost connection", session_id
sessions.pop(session_id)
- sessions_sub_numblocks.remove(session_id)
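+ # only sessions that subscribed to numblocks appear in this dict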
+ if session_id in sessions_sub_numblocks:
+ sessions_sub_numblocks.pop(session_id)
# one thread per client. put requests in a queue.
def tcp_client_thread(ipaddr,conn):
""" use a persistent connection. put commands in a queue."""
- print "persistent client thread", ipaddr
+
+ print timestr(), "TCP session", ipaddr
global sessions
session_id = random_string(10)
msg = ''
while not stopping:
- d = conn.recv(1024)
- msg += d
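+ # a socket error from the peer is treated like a closed connection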
+ try:
+ d = conn.recv(1024)
+ except socket.error:
+ d = ''
if not d:
- close_sesion(session_id)
+ close_session(session_id)
break
+ msg += d
while True:
s = msg.find('\n')
if s == -1:
break
else:
- c = msg[0:s]
+ c = msg[0:s].strip()
msg = msg[s+1:]
- c = json.loads(c)
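+ # 'quit' is a bare-text command, checked before JSON decoding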
+ if c == 'quit':
+ conn.close()
+ close_session(session_id)
+ return
try:
- cmd = c['method']
- data = c['params']
+ c = json.loads(c)
+ except:
+ print "json error", repr(c)
+ continue
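+ # a JSON-RPC request carries an id, a method and a params list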
+ try:
+ message_id = c.get('id')
+ method = c.get('method')
+ params = c.get('params')
except:
print "syntax error", repr(c), ipaddr
continue
# add to queue
- input_queue.put((session_id, cmd, data))
+ input_queue.put((session_id, message_id, method, params))
+
# read commands from the input queue. perform requests, etc. this should be called from the main thread.
def process_input_queue():
while not stopping:
- session_id, cmd, data = input_queue.get()
+ session_id, message_id, method, data = input_queue.get()
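+ # drop requests from sessions that have already been closed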
+ if session_id not in sessions:
+ continue
out = None
- if cmd == 'address.subscribe':
- subscribe_to_address(session_id,data)
- elif cmd == 'numblocks.subscribe':
- subscribe_to_numblocks(session_id)
- elif cmd == 'client.version':
- sessions[session_id]['version'] = data
- elif cmd == 'server.banner':
- out = json.dumps( { 'method':'server.banner', 'result':config.get('server','banner').replace('\\n','\n') } )
- elif cmd == 'server.peers':
- out = json.dumps( { 'method':'server.peers', 'result':peer_list.values() } )
- elif cmd == 'address.get_history':
- address = data
- out = json.dumps( { 'method':'address.get_history', 'address':address, 'result':store.get_history( address ) } )
- elif cmd == 'transaction.broadcast':
+ if method == 'address.subscribe':
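+ # params is a list; the address is its first element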
+ address = data[0]
+ subscribe_to_address(session_id, message_id, address)
+ elif method == 'numblocks.subscribe':
+ subscribe_to_numblocks(session_id, message_id)
+ elif method == 'client.version':
+ sessions[session_id]['version'] = data[0]
+ elif method == 'server.banner':
+ out = { 'result':config.get('server','banner').replace('\\n','\n') }
+ elif method == 'server.peers':
+ out = { 'result':peer_list.values() }
+ elif method == 'address.get_history':
+ address = data[0]
+ out = { 'result':store.get_history( address ) }
+ elif method == 'transaction.broadcast':
txo = send_tx(data)
print "sent tx:", txo
- out = json.dumps( { 'method':'transaction.broadcast', 'result':txo } )
+ out = { 'result':txo }
else:
- print "unknown command", cmd
+ print "unknown command", method
if out:
+ out['id'] = message_id
+ out = json.dumps( out )
output_queue.put((session_id, out))
# this is a separate thread
def memorypool_update(store):
+
ds = BCDataStream.BCDataStream()
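+ # snapshot the previous mempool before rebuilding the key list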
+ previous_transactions = store.mempool_keys
store.mempool_keys = []
postdata = dumps({"method": 'getmemorypool', 'params': [], 'id':'jsonrpc'})
ds.write(hextx.decode('hex'))
tx = deserialize.parse_Transaction(ds)
tx['hash'] = util.double_sha256(tx['tx'])
- tx_hash = tx['hash'][::-1].encode('hex')
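+ # store the hash in the database's own encoding so mempool keys match db rows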
+ tx_hash = store.hashin(tx['hash'])
+
store.mempool_keys.append(tx_hash)
if store.tx_find_id_and_value(tx):
pass
else:
- store.import_tx(tx, False)
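+ # unconfirmed txs update the cache too, so address histories include mempool entries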
+ tx_id = store.import_tx(tx, False)
+ store.update_tx_cache(tx_id)
store.commit()
-
def clean_session_thread():
while not stopping:
time.sleep(30)
print "starting Electrum server"
- print "cache:", config.get('server', 'cache')
conf = DataStore.CONFIG_DEFAULTS
args, argv = readconf.parse_argv( [], conf)
dblock.acquire()
store.catch_up()
memorypool_update(store)
- block_number = store.get_block_number(1)
+ block_number = store.get_block_number(1)
if block_number != old_block_number:
old_block_number = block_number
- for session_id in sessions_sub_numblocks:
+ for session_id in sessions_sub_numblocks.keys():
send_numblocks(session_id)
except IOError: