X-Git-Url: https://git.novaco.in/?a=blobdiff_plain;f=server.py;h=0d0a63afbe920bb9d4bb39b1ea24bd8acaa995ce;hb=313d575964630d842c34a40364425130f6228387;hp=2d4118a60fe27fd365e3cca979023d7c3334ea57;hpb=bb925d1cf82639af3808483b22ee810144a3f2f9;p=electrum-server.git diff --git a/server.py b/server.py index 2d4118a..0d0a63a 100755 --- a/server.py +++ b/server.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright(C) 2011 thomasv@gitorious +# Copyright(C) 2012 thomasv@gitorious # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as @@ -25,100 +25,79 @@ Todo: """ -import time, json, socket, operator, thread, ast, sys,re -import psycopg2, binascii - from Abe.abe import hash_to_address, decode_check_address from Abe.DataStore import DataStore as Datastore_class from Abe import DataStore, readconf, BCDataStream, deserialize, util, base58 -import ConfigParser -from json import dumps, loads -import urllib +import psycopg2, binascii -# we need to import electrum -sys.path.append('../client/') -from wallet import Wallet -from interface import Interface +import thread, traceback, sys, urllib, operator +from json import dumps, loads -config = ConfigParser.ConfigParser() -# set some defaults, which will be overwritten by the config file -config.add_section('server') -config.set('server','banner', 'Welcome to Electrum!') -config.set('server', 'host', 'localhost') -config.set('server', 'port', 50000) -config.set('server', 'password', '') -config.set('server', 'irc', 'yes') -config.set('server', 'cache', 'no') -config.set('server', 'ircname', 'Electrum server') -config.add_section('database') -config.set('database', 'type', 'psycopg2') -config.set('database', 'database', 'abe') +class MyStore(Datastore_class): -try: - f = open('/etc/electrum.conf','r') - config.readfp(f) - f.close() -except: - print "Could not read electrum.conf. I will use the default values." 
+ def __init__(self, config): + conf = DataStore.CONFIG_DEFAULTS + args, argv = readconf.parse_argv( [], conf) + args.dbtype = config.get('database','type') + if args.dbtype == 'sqlite3': + args.connect_args = { 'database' : config.get('database','database') } + elif args.dbtype == 'MySQLdb': + args.connect_args = { 'db' : config.get('database','database'), 'user' : config.get('database','username'), 'passwd' : config.get('database','password') } + elif args.dbtype == 'psycopg2': + args.connect_args = { 'database' : config.get('database','database') } -try: - f = open('/etc/electrum.banner','r') - config.set('server','banner', f.read()) - f.close() -except: - pass + Datastore_class.__init__(self,args) -password = config.get('server','password') -bitcoind_url = 'http://%s:%s@%s:%s/' % ( config.get('bitcoind','user'), config.get('bitcoind','password'), config.get('bitcoind','host'), config.get('bitcoind','port')) + self.tx_cache = {} + self.mempool_keys = {} + self.bitcoind_url = 'http://%s:%s@%s:%s/' % ( config.get('bitcoind','user'), config.get('bitcoind','password'), config.get('bitcoind','host'), config.get('bitcoind','port')) -stopping = False -block_number = -1 -old_block_number = -1 -sessions = {} -sessions_sub_numblocks = [] # sessions that have subscribed to the service + self.address_queue = Queue() -dblock = thread.allocate_lock() -peer_list = {} + self.dblock = thread.allocate_lock() -wallets = {} # for ultra-light clients such as bccapi -from Queue import Queue -input_queue = Queue() -output_queue = Queue() -address_queue = Queue() -class MyStore(Datastore_class): + def import_block(self, b, chain_ids=frozenset()): + block_id = super(MyStore, self).import_block(b, chain_ids) + for pos in xrange(len(b['transactions'])): + tx = b['transactions'][pos] + if 'hash' not in tx: + tx['hash'] = util.double_sha256(tx['tx']) + tx_id = store.tx_find_id_and_value(tx) + if tx_id: + self.update_tx_cache(tx_id) + else: + print "error: import_block: no tx_id" + return block_id - def import_tx(self, tx, is_coinbase): - tx_id = super(MyStore, self).import_tx(tx, is_coinbase) - if config.get('server', 'cache') == 'yes': self.update_tx_cache(tx_id) def update_tx_cache(self, txid): inrows = self.get_tx_inputs(txid, False) for row in inrows: - _hash = store.binout(row[6]) + _hash = self.binout(row[6]) address = hash_to_address(chr(0), _hash) if self.tx_cache.has_key(address): print "cache: invalidating", address self.tx_cache.pop(address) - address_queue.put(address) + self.address_queue.put(address) outrows = self.get_tx_outputs(txid, False) for row in outrows: - _hash = store.binout(row[6]) + _hash = self.binout(row[6]) address = hash_to_address(chr(0), _hash) if self.tx_cache.has_key(address): print "cache: invalidating", address self.tx_cache.pop(address) - address_queue.put(address) + self.address_queue.put(address) def safe_sql(self,sql, params=(), lock=True): try: - if lock: dblock.acquire() + if lock: self.dblock.acquire() ret = self.selectall(sql,params) - if lock: dblock.release() + if lock: self.dblock.release() return ret except: print "sql error", sql @@ -227,10 +206,9 @@ class MyStore(Datastore_class): def get_history(self, addr): - if config.get('server','cache') == 'yes': - cached_version = self.tx_cache.get( addr ) - if cached_version is not None: - return cached_version + cached_version = self.tx_cache.get( addr ) + if cached_version is not None: + return cached_version version, binaddr = decode_check_address(addr) if binaddr is None: @@ -334,38 +312,126 @@ class 
MyStore(Datastore_class): if not row[4]: txpoint['raw_scriptPubKey'] = row[1] # cache result - if config.get('server','cache') == 'yes' and not address_has_mempool: + if not address_has_mempool: self.tx_cache[addr] = txpoints return txpoints -class Direct_Interface(Interface): - def __init__(self): - pass + def memorypool_update(store): - def handler(self, method, params = ''): - cmds = {'session.new':new_session, - 'session.poll':poll_session, - 'session.update':update_session, - 'blockchain.transaction.broadcast':send_tx, - 'blockchain.address.get_history':store.get_history - } - func = cmds[method] - return func( params ) + ds = BCDataStream.BCDataStream() + previous_transactions = store.mempool_keys + store.mempool_keys = [] + postdata = dumps({"method": 'getmemorypool', 'params': [], 'id':'jsonrpc'}) + respdata = urllib.urlopen(store.bitcoind_url, postdata).read() + r = loads(respdata) + if r['error'] != None: + return + + v = r['result'].get('transactions') + for hextx in v: + ds.clear() + ds.write(hextx.decode('hex')) + tx = deserialize.parse_Transaction(ds) + tx['hash'] = util.double_sha256(tx['tx']) + tx_hash = store.hashin(tx['hash']) + + store.mempool_keys.append(tx_hash) + if store.tx_find_id_and_value(tx): + pass + else: + tx_id = store.import_tx(tx, False) + store.update_tx_cache(tx_id) + + store.commit() + + + def send_tx(self,tx): + postdata = dumps({"method": 'importtransaction', 'params': [tx], 'id':'jsonrpc'}) + respdata = urllib.urlopen(self.bitcoind_url, postdata).read() + r = loads(respdata) + if r['error'] != None: + out = "error: transaction rejected by memorypool\n"+tx + else: + out = r['result'] + return out + + + def main_iteration(store): + try: + store.dblock.acquire() + store.catch_up() + store.memorypool_update() + block_number = store.get_block_number(1) + + except IOError: + print "IOError: cannot reach bitcoind" + block_number = 0 + except: + traceback.print_exc(file=sys.stdout) + block_number = 0 + finally: + store.dblock.release() + + return block_number + + + +import time, json, socket, operator, thread, ast, sys, re, traceback +import ConfigParser +from json import dumps, loads +import urllib + + +config = ConfigParser.ConfigParser() +# set some defaults, which will be overwritten by the config file +config.add_section('server') +config.set('server','banner', 'Welcome to Electrum!') +config.set('server', 'host', 'localhost') +config.set('server', 'port', '50000') +config.set('server', 'password', '') +config.set('server', 'irc', 'yes') +config.set('server', 'ircname', 'Electrum server') +config.add_section('database') +config.set('database', 'type', 'psycopg2') +config.set('database', 'database', 'abe') + +try: + f = open('/etc/electrum.conf','r') + config.readfp(f) + f.close() +except: + print "Could not read electrum.conf. I will use the default values." 
+ +try: + f = open('/etc/electrum.banner','r') + config.set('server','banner', f.read()) + f.close() +except: + pass + + +password = config.get('server','password') + +stopping = False +block_number = -1 +sessions = {} +sessions_sub_numblocks = {} # sessions that have subscribed to the service + +m_sessions = [{}] # served by http + +peer_list = {} + +wallets = {} # for ultra-light clients such as bccapi + +from Queue import Queue +input_queue = Queue() +output_queue = Queue() -def send_tx(tx): - postdata = dumps({"method": 'importtransaction', 'params': [tx], 'id':'jsonrpc'}) - respdata = urllib.urlopen(bitcoind_url, postdata).read() - r = loads(respdata) - if r['error'] != None: - out = "error: transaction rejected by memorypool\n"+tx - else: - out = r['result'] - return out @@ -375,59 +441,74 @@ def random_string(N): -def cmd_stop(data): +def cmd_stop(_,__,pw): global stopping - if password == data: + if password == pw: stopping = True return 'ok' else: return 'wrong password' -def cmd_load(pw): +def cmd_load(_,__,pw): if password == pw: return repr( len(sessions) ) else: return 'wrong password' -def clear_cache(pw): - if password == pw: - store.tx_cache = {} - return 'ok' - else: - return 'wrong password' -def get_cache(pw,addr): - if password == pw: - return store.tx_cache.get(addr) - else: - return 'wrong password' -def poll_session(session_id): +def modified_addresses(a_session): + #t1 = time.time() + import copy + session = copy.deepcopy(a_session) + addresses = session['addresses'] + session['last_time'] = time.time() + ret = {} + k = 0 + for addr in addresses: + status = get_address_status( addr ) + msg_id, last_status = addresses.get( addr ) + if last_status != status: + addresses[addr] = msg_id, status + ret[addr] = status + + #t2 = time.time() - t1 + #if t2 > 10: print "high load:", session_id, "%d/%d"%(k,len(addresses)), t2 + return ret, addresses + + +def poll_session(session_id): + # native session = sessions.get(session_id) if session is None: print time.asctime(), "session not found", session_id - out = repr( (-1, {})) + return -1, {} else: - t1 = time.time() - addresses = session['addresses'] - session['last_time'] = time.time() - ret = {} - k = 0 - for addr in addresses: - if store.tx_cache.get( addr ) is not None: k += 1 - status = get_address_status( addr ) - last_status = addresses.get( addr ) - if last_status != status: - addresses[addr] = status - ret[addr] = status - if ret: - sessions[session_id]['addresses'] = addresses - out = repr( (block_number, ret ) ) - t2 = time.time() - t1 - if t2 > 10: - print "high load:", session_id, "%d/%d"%(k,len(addresses)), t2 + ret, addresses = modified_addresses(session) + if ret: sessions[session_id]['addresses'] = addresses + return repr( (block_number,ret)) + + +def poll_session_json(session_id, message_id): + session = m_sessions[0].get(session_id) + if session is None: + raise BaseException("session not found %s"%session_id) + else: + out = [] + ret, addresses = modified_addresses(session) + if ret: + m_sessions[0][session_id]['addresses'] = addresses + for addr in ret: + msg_id, status = addresses[addr] + out.append( { 'id':msg_id, 'result':status } ) + + msg_id, last_nb = session.get('numblocks') + if last_nb: + if last_nb != block_number: + m_sessions[0][session_id]['numblocks'] = msg_id, block_number + out.append( {'id':msg_id, 'result':block_number} ) return out @@ -435,22 +516,19 @@ def poll_session(session_id): def do_update_address(addr): # an address was involved in a transaction; we check if it was subscribed to in 
a session # the address can be subscribed in several sessions; the cache should ensure that we don't do redundant requests + for session_id in sessions.keys(): session = sessions[session_id] - if session.get('type') != 'subscribe': continue + if session.get('type') != 'persistent': continue addresses = session['addresses'].keys() if addr in addresses: - print "address ", addr, "found in session", session_id status = get_address_status( addr ) - print "new_status:", status - last_status = session['addresses'][addr] - print "last_status", last_status + message_id, last_status = session['addresses'][addr] if last_status != status: - print "status is new", addr - send_status(session_id,addr,status) - sessions[session_id]['addresses'][addr] = status - + #print "sending new status for %s:"%addr, status + send_status(session_id,message_id,addr,status) + sessions[session_id]['addresses'][addr] = (message_id,status) def get_address_status(addr): # get address status, i.e. the last block for that address. @@ -467,34 +545,79 @@ def get_address_status(addr): def send_numblocks(session_id): - out = json.dumps( {'method':'numblocks.subscribe', 'result':block_number} ) + message_id = sessions_sub_numblocks[session_id] + out = json.dumps( {'id':message_id, 'result':block_number} ) output_queue.put((session_id, out)) -def send_status(session_id, address, status): - out = json.dumps( { 'method':'address.subscribe', 'address':address, 'status':status } ) +def send_status(session_id, message_id, address, status): + out = json.dumps( { 'id':message_id, 'result':status } ) output_queue.put((session_id, out)) -def subscribe_to_numblocks(session_id): - sessions_sub_numblocks.append(session_id) +def address_get_history_json(_,message_id,address): + return store.get_history(address) + +def subscribe_to_numblocks(session_id, message_id): + sessions_sub_numblocks[session_id] = message_id send_numblocks(session_id) -def subscribe_to_address(session_id, address): +def subscribe_to_numblocks_json(session_id, message_id): + global m_sessions + m_sessions[0][session_id]['numblocks'] = message_id,block_number + return block_number + +def subscribe_to_address(session_id, message_id, address): status = get_address_status(address) - sessions[session_id]['type'] = 'subscribe' - sessions[session_id]['addresses'][address] = status + sessions[session_id]['addresses'][address] = (message_id, status) sessions[session_id]['last_time'] = time.time() - send_status(session_id, address, status) + send_status(session_id, message_id, address, status) + +def add_address_to_session_json(session_id, message_id, address): + global m_sessions + sessions = m_sessions[0] + status = get_address_status(address) + sessions[session_id]['addresses'][address] = (message_id, status) + sessions[session_id]['last_time'] = time.time() + m_sessions[0] = sessions + return status + +def add_address_to_session(session_id, address): + status = get_address_status(address) + sessions[session_id]['addresses'][address] = ("", status) + sessions[session_id]['last_time'] = time.time() + return status def new_session(version, addresses): session_id = random_string(10) sessions[session_id] = { 'addresses':{}, 'version':version } for a in addresses: - sessions[session_id]['addresses'][a] = '' + sessions[session_id]['addresses'][a] = ('','') out = repr( (session_id, config.get('server','banner').replace('\\n','\n') ) ) sessions[session_id]['last_time'] = time.time() return out + +def client_version_json(session_id, _, version): + global m_sessions + sessions = 
m_sessions[0] + sessions[session_id]['version'] = version + m_sessions[0] = sessions + +def create_session_json(_, __): + sessions = m_sessions[0] + session_id = random_string(10) + print "creating session", session_id + sessions[session_id] = { 'addresses':{}, 'numblocks':('','') } + sessions[session_id]['last_time'] = time.time() + m_sessions[0] = sessions + return session_id + + + +def get_banner(_,__): + return config.get('server','banner').replace('\\n','\n') + def update_session(session_id,addresses): + """deprecated in 0.42""" sessions[session_id]['addresses'] = {} for a in addresses: sessions[session_id]['addresses'][a] = '' @@ -547,12 +670,12 @@ def native_client_thread(ipaddr,conn): conn.close() +def timestr(): + return time.strftime("[%d/%m/%Y-%H:%M:%S]") # used by the native handler def do_command(cmd, data, ipaddr): - timestr = time.strftime("[%d/%m/%Y-%H:%M:%S]") - if cmd=='b': out = "%d"%block_number @@ -567,64 +690,26 @@ def do_command(cmd, data, ipaddr): except: print "error", data return None - print timestr, "new session", ipaddr, addresses[0] if addresses else addresses, len(addresses), version + print timestr(), "new session", ipaddr, addresses[0] if addresses else addresses, len(addresses), version out = new_session(version, addresses) + elif cmd=='address.subscribe': + try: + session_id, addr = ast.literal_eval(data) + except: + traceback.print_exc(file=sys.stdout) + print data + return None + out = add_address_to_session(session_id,addr) + elif cmd=='update_session': try: session_id, addresses = ast.literal_eval(data) except: - print "error" + traceback.print_exc(file=sys.stdout) return None - print timestr, "update session", ipaddr, addresses[0] if addresses else addresses, len(addresses) + print timestr(), "update session", ipaddr, addresses[0] if addresses else addresses, len(addresses) out = update_session(session_id,addresses) - - elif cmd == 'bccapi_login': - import electrum - print "data",data - v, k = ast.literal_eval(data) - master_public_key = k.decode('hex') # todo: sanitize. no need to decode twice... 
- print master_public_key - wallet_id = random_string(10) - w = Wallet( Direct_Interface() ) - w.master_public_key = master_public_key.decode('hex') - w.synchronize() - wallets[wallet_id] = w - out = wallet_id - print "wallets", wallets - - elif cmd == 'bccapi_getAccountInfo': - from wallet import int_to_hex - v, wallet_id = ast.literal_eval(data) - w = wallets.get(wallet_id) - if w is not None: - num = len(w.addresses) - c, u = w.get_balance() - out = int_to_hex(num,4) + int_to_hex(c,8) + int_to_hex( c+u, 8 ) - out = out.decode('hex') - else: - print "error",data - out = "error" - - elif cmd == 'bccapi_getAccountStatement': - from wallet import int_to_hex - v, wallet_id = ast.literal_eval(data) - w = wallets.get(wallet_id) - if w is not None: - num = len(w.addresses) - c, u = w.get_balance() - total_records = num_records = 0 - out = int_to_hex(num,4) + int_to_hex(c,8) + int_to_hex( c+u, 8 ) + int_to_hex( total_records ) + int_to_hex( num_records ) - out = out.decode('hex') - else: - print "error",data - out = "error" - - elif cmd == 'bccapi_getSendCoinForm': - out = '' - - elif cmd == 'bccapi_submitTransaction': - out = '' elif cmd=='poll': out = poll_session(data) @@ -635,11 +720,11 @@ def do_command(cmd, data, ipaddr): out = repr( store.get_history( address ) ) elif cmd == 'load': - out = cmd_load(data) + out = cmd_load(None,None,data) elif cmd =='tx': - out = send_tx(data) - print timestr, "sent tx:", ipaddr, out + out = store.send_tx(data) + print timestr(), "sent tx:", ipaddr, out elif cmd == 'stop': out = cmd_stop(data) @@ -673,73 +758,95 @@ def tcp_server_thread(): traceback.print_exc(file=sys.stdout) -def close_sesion(session_id): - print "lost connection", session_id +def close_session(session_id): + #print "lost connection", session_id sessions.pop(session_id) - sessions_sub_numblocks.remove(session_id) + if session_id in sessions_sub_numblocks: + sessions_sub_numblocks.pop(session_id) # one thread per client. put requests in a queue. def tcp_client_thread(ipaddr,conn): """ use a persistent connection. put commands in a queue.""" - print "persistent client thread", ipaddr + + print timestr(), "TCP session", ipaddr global sessions session_id = random_string(10) - sessions[session_id] = { 'conn':conn, 'addresses':{}, 'version':'unknown' } + sessions[session_id] = { 'conn':conn, 'addresses':{}, 'version':'unknown', 'type':'persistent' } ipaddr = ipaddr[0] msg = '' while not stopping: - d = conn.recv(1024) - msg += d + try: + d = conn.recv(1024) + except socket.error: + d = '' if not d: - close_sesion(session_id) + close_session(session_id) break + msg += d while True: s = msg.find('\n') if s ==-1: break else: - c = msg[0:s] + c = msg[0:s].strip() msg = msg[s+1:] - c = json.loads(c) + if c == 'quit': + conn.close() + close_session(session_id) + return + try: + c = json.loads(c) + except: + print "json error", repr(c) + continue try: - cmd = c['method'] - data = c['params'] + message_id = c.get('id') + method = c.get('method') + params = c.get('params') except: print "syntax error", repr(c), ipaddr continue # add to queue - input_queue.put((session_id, cmd, data)) + input_queue.put((session_id, message_id, method, params)) + # read commands from the input queue. perform requests, etc. this should be called from the main thread. 
def process_input_queue(): while not stopping: - session_id, cmd, data = input_queue.get() + session_id, message_id, method, data = input_queue.get() + if session_id not in sessions.keys(): + continue out = None - if cmd == 'address.subscribe': - subscribe_to_address(session_id,data) - elif cmd == 'numblocks.subscribe': - subscribe_to_numblocks(session_id) - elif cmd == 'client.version': - sessions[session_id]['version'] = data - elif cmd == 'server.banner': - out = json.dumps( { 'method':'server.banner', 'result':config.get('server','banner').replace('\\n','\n') } ) - elif cmd == 'address.get_history': - address = data - out = json.dumps( { 'method':'address.get_history', 'address':address, 'result':store.get_history( address ) } ) - elif cmd == 'transaction.broadcast': - txo = send_tx(data) + if method == 'address.subscribe': + address = data[0] + subscribe_to_address(session_id,message_id,address) + elif method == 'numblocks.subscribe': + subscribe_to_numblocks(session_id,message_id) + elif method == 'client.version': + sessions[session_id]['version'] = data[0] + elif method == 'server.banner': + out = { 'result':config.get('server','banner').replace('\\n','\n') } + elif method == 'server.peers': + out = { 'result':peer_list.values() } + elif method == 'address.get_history': + address = data[0] + out = { 'result':store.get_history( address ) } + elif method == 'transaction.broadcast': + txo = store.send_tx(data[0]) print "sent tx:", txo - out = json.dumps( { 'method':'transaction.broadcast', 'result':txo } ) + out = {'result':txo } else: - print "unknown command", cmd + print "unknown command", method if out: + out['id'] = message_id + out = json.dumps( out ) output_queue.put((session_id, out)) # this is a separate thread @@ -760,31 +867,6 @@ def process_output_queue(): #################################################################### -def memorypool_update(store): - ds = BCDataStream.BCDataStream() - store.mempool_keys = [] - - postdata = dumps({"method": 'getmemorypool', 'params': [], 'id':'jsonrpc'}) - respdata = urllib.urlopen(bitcoind_url, postdata).read() - r = loads(respdata) - if r['error'] != None: - return - - v = r['result'].get('transactions') - for hextx in v: - ds.clear() - ds.write(hextx.decode('hex')) - tx = deserialize.parse_Transaction(ds) - tx['hash'] = util.double_sha256(tx['tx']) - tx_hash = tx['hash'][::-1].encode('hex') - store.mempool_keys.append(tx_hash) - if store.tx_find_id_and_value(tx): - pass - else: - store.import_tx(tx, False) - - store.commit() - def clean_session_thread(): @@ -792,7 +874,7 @@ def clean_session_thread(): time.sleep(30) t = time.time() for k,s in sessions.items(): - if s.get('type') == 'subscribe': continue + if s.get('type') == 'persistent': continue t0 = s['last_time'] if t - t0 > 5*60: sessions.pop(k) @@ -841,30 +923,29 @@ def irc_thread(): s.close() +def get_peers_json(_,__): + return peer_list.values() -def http_server_thread(store): +def http_server_thread(): # see http://code.google.com/p/jsonrpclib/ from SocketServer import ThreadingMixIn - from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer - class SimpleThreadedJSONRPCServer(ThreadingMixIn, SimpleJSONRPCServer): pass - server = SimpleThreadedJSONRPCServer(( config.get('server','host'), 8081)) - server.register_function(lambda : peer_list.values(), 'peers') + from StratumJSONRPCServer import StratumJSONRPCServer + class StratumThreadedJSONRPCServer(ThreadingMixIn, StratumJSONRPCServer): pass + server = StratumThreadedJSONRPCServer(( config.get('server','host'), 
8081)) + server.register_function(get_peers_json, 'server.peers') server.register_function(cmd_stop, 'stop') server.register_function(cmd_load, 'load') - server.register_function(lambda : block_number, 'blocks') - server.register_function(clear_cache, 'clear_cache') - server.register_function(get_cache, 'get_cache') - server.register_function(send_tx, 'blockchain.transaction.broadcast') - server.register_function(store.get_history, 'blockchain.address.get_history') - server.register_function(new_session, 'session.new') - server.register_function(update_session, 'session.update') - server.register_function(poll_session, 'session.poll') + server.register_function(get_banner, 'server.banner') + server.register_function(lambda a,b,c: store.send_tx(c), 'transaction.broadcast') + server.register_function(address_get_history_json, 'address.get_history') + server.register_function(add_address_to_session_json, 'address.subscribe') + server.register_function(subscribe_to_numblocks_json, 'numblocks.subscribe') + server.register_function(client_version_json, 'client.version') + server.register_function(create_session_json, 'session.create') # internal message (not part of protocol) + server.register_function(poll_session_json, 'session.poll') # internal message (not part of protocol) server.serve_forever() -import traceback - - if __name__ == '__main__': if len(sys.argv)>1: @@ -874,7 +955,7 @@ if __name__ == '__main__': if cmd == 'load': out = server.load(password) elif cmd == 'peers': - out = server.peers() + out = server.server.peers() elif cmd == 'stop': out = server.stop(password) elif cmd == 'clear_cache': @@ -882,73 +963,46 @@ if __name__ == '__main__': elif cmd == 'get_cache': out = server.get_cache(password,sys.argv[2]) elif cmd == 'h': - out = server.blockchain.address.get_history(sys.argv[2]) + out = server.address.get_history(sys.argv[2]) elif cmd == 'tx': - out = server.blockchain.transaction.broadcast(sys.argv[2]) + out = server.transaction.broadcast(sys.argv[2]) elif cmd == 'b': - out = server.blocks() + out = server.numblocks.subscribe() else: out = "Unknown command: '%s'" % cmd print out sys.exit(0) - - print "starting Electrum server" - print "cache:", config.get('server', 'cache') - - conf = DataStore.CONFIG_DEFAULTS - args, argv = readconf.parse_argv( [], conf) - args.dbtype= config.get('database','type') - if args.dbtype == 'sqlite3': - args.connect_args = { 'database' : config.get('database','database') } - elif args.dbtype == 'MySQLdb': - args.connect_args = { 'db' : config.get('database','database'), 'user' : config.get('database','username'), 'passwd' : config.get('database','password') } - elif args.dbtype == 'psycopg2': - args.connect_args = { 'database' : config.get('database','database') } - store = MyStore(args) - store.tx_cache = {} - store.mempool_keys = {} + # backend + # from db import MyStore + store = MyStore(config) # supported protocols thread.start_new_thread(native_server_thread, ()) thread.start_new_thread(tcp_server_thread, ()) - thread.start_new_thread(http_server_thread, (store,)) - + thread.start_new_thread(http_server_thread, ()) thread.start_new_thread(clean_session_thread, ()) if (config.get('server','irc') == 'yes' ): thread.start_new_thread(irc_thread, ()) - while not stopping: - try: - dblock.acquire() - store.catch_up() - memorypool_update(store) - block_number = store.get_block_number(1) - - if block_number != old_block_number: - old_block_number = block_number - for session_id in sessions_sub_numblocks: - send_numblocks(session_id) + print "starting 
Electrum server" - except IOError: - print "IOError: cannot reach bitcoind" - block_number = 0 - except: - traceback.print_exc(file=sys.stdout) - block_number = 0 - finally: - dblock.release() + old_block_number = None + while not stopping: + block_number = store.main_iteration() - # do addresses + if block_number != old_block_number: + old_block_number = block_number + for session_id in sessions_sub_numblocks.keys(): + send_numblocks(session_id) while True: try: - addr = address_queue.get(False) + addr = store.address_queue.get(False) except: break do_update_address(addr) time.sleep(10) - print "server stopped"