overload import_block

diff --git a/server.py b/server.py
index cf0e291..8050c55 100755
--- a/server.py
+++ b/server.py
@@ -50,7 +50,6 @@ config.set('server', 'host', 'localhost')
 config.set('server', 'port', 50000)
 config.set('server', 'password', '')
 config.set('server', 'irc', 'yes')
-config.set('server', 'cache', 'no') 
 config.set('server', 'ircname', 'Electrum server')
 config.add_section('database')
 config.set('database', 'type', 'psycopg2')
@@ -91,9 +90,20 @@ address_queue = Queue()
 
 class MyStore(Datastore_class):
 
-    def import_tx(self, tx, is_coinbase):
-        tx_id = super(MyStore, self).import_tx(tx, is_coinbase)
-        if config.get('server', 'cache') == 'yes': self.update_tx_cache(tx_id)
+    def import_block(self, b, chain_ids=frozenset()):
+        block_id = super(MyStore, self).import_block(b, chain_ids)
+        print "import block", block_id
+        for pos in xrange(len(b['transactions'])):
+            tx = b['transactions'][pos]
+            if 'hash' not in tx:
+                tx['hash'] = util.double_sha256(tx['tx'])
+            tx_id = store.tx_find_id_and_value(tx)
+            if tx_id:
+                self.update_tx_cache(tx_id)
+            else:
+                print "error: import_block: no tx_id"
+        return block_id
+
 
     def update_tx_cache(self, txid):
         inrows = self.get_tx_inputs(txid, False)
@@ -227,10 +237,9 @@ class MyStore(Datastore_class):
 
     def get_history(self, addr):
         
-        if config.get('server','cache') == 'yes':
-            cached_version = self.tx_cache.get( addr )
-            if cached_version is not None:
-                return cached_version
+        cached_version = self.tx_cache.get( addr )
+        if cached_version is not None:
+            return cached_version
 
         version, binaddr = decode_check_address(addr)
         if binaddr is None:
@@ -334,7 +343,7 @@ class MyStore(Datastore_class):
                     if not row[4]: txpoint['raw_scriptPubKey'] = row[1]
 
         # cache result
-        if config.get('server','cache') == 'yes' and not address_has_mempool:
+        if not address_has_mempool:
             self.tx_cache[addr] = txpoints
         
         return txpoints
@@ -441,13 +450,11 @@ def do_update_address(addr):
         addresses = session['addresses'].keys()
 
         if addr in addresses:
-            print "address ", addr, "found in session", session_id
+            print "address ", addr, "is watched by", session_id
             status = get_address_status( addr )
-            print "new_status:", status
             last_status = session['addresses'][addr]
-            print "last_status", last_status
             if last_status != status:
-                print "status is new", addr
+                print "sending new status for %s:"%addr, status
                 send_status(session_id,addr,status)
                 sessions[session_id]['addresses'][addr] = status
 
@@ -693,12 +700,15 @@ def tcp_client_thread(ipaddr,conn):
     msg = ''
 
     while not stopping:
-        d = conn.recv(1024)
-        msg += d
+        try:
+            d = conn.recv(1024)
+        except socket.error:
+            d = ''
         if not d:
             close_session(session_id)
             break
 
+        msg += d
         while True:
             s = msg.find('\n')
             if s ==-1:
@@ -716,8 +726,8 @@ def tcp_client_thread(ipaddr,conn):
                     print "json error", repr(c)
                     continue
                 try:
-                    cmd = c['method']
-                    data = c['params']
+                    cmd = c.get('method')
+                    data = c.get('params')
                 except:
                     print "syntax error", repr(c), ipaddr
                     continue
@@ -775,7 +785,9 @@ def process_output_queue():
 
 
 def memorypool_update(store):
+
     ds = BCDataStream.BCDataStream()
+    previous_transactions = store.mempool_keys
     store.mempool_keys = []
 
     postdata = dumps({"method": 'getmemorypool', 'params': [], 'id':'jsonrpc'})
@@ -790,17 +802,18 @@ def memorypool_update(store):
         ds.write(hextx.decode('hex'))
         tx = deserialize.parse_Transaction(ds)
         tx['hash'] = util.double_sha256(tx['tx'])
-        tx_hash = tx['hash'][::-1].encode('hex')
+        tx_hash = store.hashin(tx['hash'])
+
         store.mempool_keys.append(tx_hash)
         if store.tx_find_id_and_value(tx):
             pass
         else:
-            store.import_tx(tx, False)
+            tx_id = store.import_tx(tx, False)
+            store.update_tx_cache(tx_id)
 
     store.commit()
 
 
-
 def clean_session_thread():
     while not stopping:
         time.sleep(30)
@@ -908,7 +921,6 @@ if __name__ == '__main__':
 
 
     print "starting Electrum server"
-    print "cache:", config.get('server', 'cache')
 
     conf = DataStore.CONFIG_DEFAULTS
     args, argv = readconf.parse_argv( [], conf)
@@ -938,8 +950,8 @@ if __name__ == '__main__':
             dblock.acquire()
             store.catch_up()
             memorypool_update(store)
-            block_number = store.get_block_number(1)
 
+            block_number = store.get_block_number(1)
             if block_number != old_block_number:
                 old_block_number = block_number
                 for session_id in sessions_sub_numblocks: