get_chunk for SPV

diff --git a/backends/abe/__init__.py b/backends/abe/__init__.py
index 74808c4..b65ef46 100644
--- a/backends/abe/__init__.py
+++ b/backends/abe/__init__.py
@@ -10,6 +10,29 @@ from Queue import Queue
 import time, threading
 
 
+import hashlib
+encode = lambda x: x[::-1].encode('hex')
+decode = lambda x: x.decode('hex')[::-1]
+Hash = lambda x: hashlib.sha256(hashlib.sha256(x).digest()).digest()
+
+def rev_hex(s):
+    return s.decode('hex')[::-1].encode('hex')
+
+def int_to_hex(i, length=1):
+    s = hex(i)[2:].rstrip('L')
+    s = "0"*(2*length - len(s)) + s
+    return rev_hex(s)
+
+def header_to_string(res):
+    s = int_to_hex(res.get('version'),4) \
+        + rev_hex(res.get('prev_block_hash')) \
+        + rev_hex(res.get('merkle_root')) \
+        + int_to_hex(int(res.get('timestamp')),4) \
+        + int_to_hex(int(res.get('bits')),4) \
+        + int_to_hex(int(res.get('nonce')),4)
+    return s
+
+
 class AbeStore(Datastore_class):
 
     def __init__(self, config):
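The helpers above serialize a block header into Bitcoin's 80-byte wire format: version, previous block hash, merkle root, timestamp, bits and nonce, with the integers little-endian and the two hashes byte-reversed from their display order. As a quick sanity check (not part of the patch), running the well-known genesis header through header_to_string and double-SHA256 hashing the result reproduces the genesis block hash:

    genesis = {
        "version": 1,
        "prev_block_hash": "0"*64,
        "merkle_root": "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b",
        "timestamp": 1231006505,
        "bits": 486604799,        # 0x1d00ffff
        "nonce": 2083236893,
    }
    s = header_to_string(genesis)         # 160 hex chars = 80 bytes
    print encode(Hash(s.decode('hex')))   # 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
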
@@ -48,6 +71,7 @@ class AbeStore(Datastore_class):
 
         self.dblock = thread.allocate_lock()
         self.last_tx_id = 0
+        self.known_mempool_hashes = []
 
     
     def import_tx(self, tx, is_coinbase):
@@ -405,17 +429,8 @@ class AbeStore(Datastore_class):
         return out
         
 
-    def get_tx_merkle(self, tx_hash):
-
-        out = self.safe_sql("""
-             SELECT block_tx.block_id FROM tx 
-             JOIN block_tx on tx.tx_id = block_tx.tx_id 
-             JOIN chain_summary on chain_summary.block_id = block_tx.block_id
-             WHERE tx_hash='%s' AND in_longest = 1"""%tx_hash)
-        block_id = out[0]
-
-        # get the block header
-        out = self.safe_sql("""
+    def get_chunk(self, index):
+        sql = """
             SELECT
                 block_hash,
                 block_version,
@@ -427,12 +442,41 @@ class AbeStore(Datastore_class):
                 prev_block_hash,
                 block_height
               FROM chain_summary
-             WHERE block_id = %d AND in_longest = 1"""%block_id)
+             WHERE block_height >= %d AND block_height< %d AND in_longest = 1"""%(index*2016, (index+1)*2016)
+
+        out = self.safe_sql(sql)
+        msg = ''
+        for row in out:
+            (block_hash, block_version, hashMerkleRoot, nTime, nBits, nNonce, height, prev_block_hash, block_height) \
+                = ( self.hashout_hex(row[0]), int(row[1]), self.hashout_hex(row[2]), int(row[3]), int(row[4]), int(row[5]), int(row[6]), self.hashout_hex(row[7]), int(row[8]) )
+            h = {"block_height":block_height, "version":block_version, "prev_block_hash":prev_block_hash, 
+                   "merkle_root":hashMerkleRoot, "timestamp":nTime, "bits":nBits, "nonce":nNonce}
+
+            if h.get('block_height')==0: h['prev_block_hash'] = "0"*64
+            msg += header_to_string(h)
+
+            #print "hash", encode(Hash(msg.decode('hex')))
+            #if h.get('block_height')==1:break
+
+        print "get_chunk", index, len(msg)
+        return msg
+
+
+
+    def get_tx_merkle(self, tx_hash):
+
+        out = self.safe_sql("""
+             SELECT block_tx.block_id FROM tx 
+             JOIN block_tx on tx.tx_id = block_tx.tx_id 
+             JOIN chain_summary on chain_summary.block_id = block_tx.block_id
+             WHERE tx_hash='%s' AND in_longest = 1"""%tx_hash)
+        block_id = out[0]
+
+        # get block height
+        out = self.safe_sql("SELECT block_height FROM chain_summary WHERE block_id = %d AND in_longest = 1"%block_id)
 
         if not out: raise BaseException("block not found")
-        row = out[0]
-        (block_hash, block_version, hashMerkleRoot, nTime, nBits, nNonce, height, prev_block_hash, block_height) \
-            = ( self.hashout_hex(row[0]), int(row[1]), self.hashout_hex(row[2]), int(row[3]), int(row[4]), int(row[5]), int(row[6]), self.hashout_hex(row[7]), int(row[8]) )
+        block_height = int(out[0][0])
 
         merkle = []
         # list all tx in block
@@ -446,10 +490,6 @@ class AbeStore(Datastore_class):
 
         # find subset.
         # TODO: do not compute this on client request, better store the hash tree of each block in a database...
-        import hashlib
-        encode = lambda x: x[::-1].encode('hex')
-        decode = lambda x: x.decode('hex')[::-1]
-        Hash = lambda x: hashlib.sha256(hashlib.sha256(x).digest()).digest()
 
         merkle = map(decode, merkle)
         target_hash = decode(tx_hash)
@@ -459,21 +499,19 @@ class AbeStore(Datastore_class):
             if len(merkle)%2: merkle.append( merkle[-1] )
             n = []
             while merkle:
+                new_hash = Hash( merkle[0] + merkle[1] )
                 if merkle[0] == target_hash:
                     s.append( "L" + encode(merkle[1]))
-                    n.append( target_hash )
+                    target_hash = new_hash
                 elif merkle[1] == target_hash:
                     s.append( "R" + encode(merkle[0]))
-                    n.append( target_hash)
-                else:
-                    n.append( Hash( merkle[0] + merkle[1] ) )
+                    target_hash = new_hash
+                n.append( new_hash )
                 merkle = merkle[2:]
             merkle = n
 
         # send result
-        out = {"block_height":block_height, "version":block_version, "prev_block":prev_block_hash, 
-                "merkle_root":hashMerkleRoot, "timestamp":nTime, "bits":nBits, "nonce":nNonce, "merkle":s}
-        return out
+        return {"block_height":block_height,"merkle":s}
 
 
 
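get_tx_merkle now returns only the block height and the merkle branch; the branch is a list of sibling hashes tagged "L" or "R", recording on which side the transaction sat at each level of the tree. A client can fold the branch back into the block's merkle root along these lines (an illustrative sketch, not in the patch, reusing the decode/encode/Hash helpers added at the top of the file):

    def merkle_root_from_branch(tx_hash, branch):
        h = decode(tx_hash)              # display hex -> internal byte order
        for step in branch:
            sibling = decode(step[1:])
            if step[0] == 'L':           # tx was the left child, sibling on the right
                h = Hash(h + sibling)
            else:                        # 'R': tx was the right child, sibling on the left
                h = Hash(sibling + h)
        return encode(h)                 # should equal the header's merkle_root
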
@@ -481,33 +519,43 @@ class AbeStore(Datastore_class):
     def memorypool_update(store):
 
         ds = BCDataStream.BCDataStream()
-        postdata = dumps({"method": 'getmemorypool', 'params': [], 'id':'jsonrpc'})
-
+        postdata = dumps({"method": 'getrawmempool', 'params': [], 'id':'jsonrpc'})
         respdata = urllib.urlopen(store.bitcoind_url, postdata).read()
         r = loads(respdata)
         if r['error'] != None:
+            print r['error']
             return
 
-        v = r['result'].get('transactions')
-        for hextx in v:
+        mempool_hashes = r.get('result')
+        for tx_hash in mempool_hashes:
+
+            if tx_hash in store.known_mempool_hashes: continue
+            store.known_mempool_hashes.append(tx_hash)
+
+            postdata = dumps({"method": 'getrawtransaction', 'params': [tx_hash], 'id':'jsonrpc'})
+            respdata = urllib.urlopen(store.bitcoind_url, postdata).read()
+            r = loads(respdata)
+            if r['error'] != None:
+                continue
+            hextx = r.get('result')
             ds.clear()
             ds.write(hextx.decode('hex'))
             tx = deserialize.parse_Transaction(ds)
             tx['hash'] = util.double_sha256(tx['tx'])
-            tx_hash = store.hashin(tx['hash'])
-
+                
             if store.tx_find_id_and_value(tx):
                 pass
             else:
                 tx_id = store.import_tx(tx, False)
                 store.update_tx_cache(tx_id)
                 #print tx_hash
-    
+
         store.commit()
+        store.known_mempool_hashes = mempool_hashes
 
 
     def send_tx(self,tx):
-        postdata = dumps({"method": 'importtransaction', 'params': [tx], 'id':'jsonrpc'})
+        postdata = dumps({"method": 'sendrawtransaction', 'params': [tx], 'id':'jsonrpc'})
         respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
         r = loads(respdata)
         if r['error'] != None:
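memorypool_update now polls getrawmempool for the list of txids and fetches only previously unseen transactions with getrawtransaction, keeping the mempool contents in known_mempool_hashes between runs; send_tx likewise switches to the standard sendrawtransaction RPC. The same dumps/urlopen/loads sequence now appears in three places, so a small wrapper along these lines could centralise the JSON-RPC plumbing (a sketch only, not part of this commit):

    def bitcoind(store, method, params=[]):
        # POST a JSON-RPC request to bitcoind; return 'result', or None on error.
        postdata = dumps({"method": method, "params": params, "id": "jsonrpc"})
        respdata = urllib.urlopen(store.bitcoind_url, postdata).read()
        r = loads(respdata)
        if r['error'] != None:
            print r['error']
            return None
        return r.get('result')

With such a helper, the inner loop would reduce to hextx = bitcoind(store, 'getrawtransaction', [tx_hash]).
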
@@ -594,6 +642,14 @@ class BlockchainProcessor(Processor):
                 error = str(e) + ': %d'% height
                 print "error:", error
 
+        elif method == 'blockchain.block.get_chunk':
+            try:
+                index = params[0]
+                result = self.store.get_chunk( index ) 
+            except BaseException, e:
+                error = str(e) + ': %d'% index
+                print "error:", error
+
         elif method == 'blockchain.transaction.broadcast':
             txo = self.store.send_tx(params[0])
             print "sent tx:", txo
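
blockchain.block.get_chunk returns the 2016 headers of one retargeting period as a single hex string, 160 hex characters (80 bytes) per header, with chunk index i covering heights [i*2016, (i+1)*2016). A client wanting the header at a given height would therefore request chunk height // 2016 and slice it, roughly as follows (an illustrative sketch, not part of the patch):

    def header_from_chunk(chunk, height):
        # chunk is the hex string returned by get_chunk(height // 2016)
        pos = (height % 2016) * 160          # each header is 80 bytes = 160 hex chars
        raw = chunk[pos:pos+160].decode('hex')
        print "hash at height %d:" % height, encode(Hash(raw))
        return raw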