tx preforwarding working
author Forrest Voight <forrest@forre.st>
Thu, 4 Oct 2012 03:31:26 +0000 (23:31 -0400)
committer Forrest Voight <forrest@forre.st>
Mon, 15 Oct 2012 06:15:29 +0000 (02:15 -0400)
p2pool/data.py
p2pool/main.py
p2pool/p2p.py

index ab1c24b..6ab7126 100644 (file)
@@ -275,6 +275,8 @@ class Share(object):
         return dict(header=self.header, txs=[self.check(tracker)] + self.other_txs)
 
 class NewShare(object):
+    other_txs = None
+    
     small_block_header_type = pack.ComposedType([
         ('version', pack.VarIntType()),
         ('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))),
@@ -421,7 +423,7 @@ class NewShare(object):
             share_info=share_info,
         ))), ref_merkle_link))
     
-    __slots__ = 'net peer contents min_header share_info hash_link merkle_link other_txs hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash new_transaction_hashes time_seen'.split(' ')
+    __slots__ = 'net peer contents min_header share_info hash_link merkle_link hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash new_transaction_hashes time_seen'.split(' ')
     
     def __init__(self, net, peer, contents):
         self.net = net
@@ -450,7 +452,9 @@ class NewShare(object):
         self.desired_version = self.share_data['desired_version']
         
         for x in self.share_info['transaction_hash_refs']:
-            assert x['share_count'] < net.CHAIN_LENGTH
+            assert x['share_count'] < 110
+        for i, x in enumerate(self.share_info['new_transaction_hashes']):
+            assert dict(share_count=0, tx_count=i) in self.share_info['transaction_hash_refs']
         
         self.gentx_hash = check_hash_link(
             self.hash_link,
@@ -494,11 +498,18 @@ class NewShare(object):
         if bitcoin_data.calculate_merkle_link([None] + other_tx_hashes, 0) != self.merkle_link:
             raise ValueError('merkle_link and other_tx_hashes do not match')
         
-        #return [gentx] + other_txs # only used by as_block
+        return gentx # only used by as_block
     
     def as_block(self, tracker):
-        assert False
-        return dict(header=self.header, txs=self.check(tracker))
+        other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, x['share_count'])].share_info['new_transaction_hashes'][x['tx_count']] for x in self.share_info['transaction_hash_refs']]
+        
+        
+        print [tx_hash in self.peer.remembered_txs for tx_hash in other_tx_hashes]
+        txs = [self.check(tracker)] + [self.peer.remembered_txs[tx_hash] for tx_hash in other_tx_hashes]
+        print
+        print 'SUCCESS'
+        print
+        return dict(header=self.header, txs=txs)
 
 
 class WeightsSkipList(forest.TrackerSkipList):
index 784c9d5..98b4e37 100644 (file)
@@ -243,6 +243,15 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         
         # setup p2p logic and join p2pool network
         
+        known_txs_var = variable.Variable({}) # hash -> tx
+        mining_txs_var = variable.Variable({}) # hash -> tx
+        @bitcoind_work.changed.watch
+        def _(work):
+            new_mining_txs = {}
+            for tx in work['transactions']:
+                new_mining_txs[bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))] = tx
+            mining_txs_var.set(new_mining_txs)
+        
         class Node(p2p.Node):
             def handle_shares(self, shares, peer):
                 if len(shares) > 5:
@@ -377,6 +386,8 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
             connect_addrs=connect_addrs,
             max_incoming_conns=args.p2pool_conns,
             traffic_happened=traffic_happened,
+            known_txs_var=known_txs_var,
+            mining_txs_var=mining_txs_var,
         )
         p2p_node.start()
         
index fd73a89..f08a6b7 100644 (file)
@@ -29,6 +29,8 @@ class Protocol(p2protocol.Protocol):
         
         self.factory.proto_made_connection(self)
         
+        self.connection_lost_event = variable.Event()
+        
         self.addr = self.transport.getPeer().host, self.transport.getPeer().port
         
         self.send_version(
@@ -58,6 +60,9 @@ class Protocol(p2protocol.Protocol):
             timeout=15,
             on_timeout=self.transport.loseConnection,
         )
+        
+        self.remote_tx_hashes = set() # view of peer's known_txs # not actually initially empty, but sending txs instead of tx hashes won't hurt
+        self.remembered_txs = {} # view of peer's mining_txs
     
     def _connect_timeout(self):
         self.timeout_delayed = None
@@ -137,6 +142,31 @@ class Protocol(p2protocol.Protocol):
         
         if best_share_hash is not None:
             self.node.handle_share_hashes([best_share_hash], self)
+        
+        def update_remote_view_of_my_known_txs(before, after):
+            added = set(after) - set(before)
+            removed = set(before) - set(after)
+            if added:
+                self.send_have_tx(tx_hashes=list(added))
+            if removed:
+                self.send_losing_tx(tx_hashes=list(removed))
+                # XXX cache locally
+        watch_id = self.node.known_txs_var.transitioned.watch(update_remote_view_of_my_known_txs)
+        self.connection_lost_event.watch(lambda: self.node.known_txs_var.transitioned.unwatch(watch_id))
+        
+        self.send_have_tx(tx_hashes=self.node.known_txs_var.value.keys())
+        
+        def update_remote_view_of_my_mining_txs(before, after):
+            added = set(after) - set(before)
+            removed = set(before) - set(after)
+            if added:
+                self.send_remember_tx(tx_hashes=[x for x in added if x in self.remote_tx_hashes], txs=[after[x] for x in added if x not in self.remote_tx_hashes])
+            if removed:
+                self.send_forget_tx(tx_hashes=removed)
+        watch_id2 = self.node.mining_txs_var.transitioned.watch(update_remote_view_of_my_mining_txs)
+        self.connection_lost_event.watch(lambda: self.node.mining_txs_var.transitioned.unwatch(watch_id2))
+        
+        self.send_remember_tx(tx_hashes=[], txs=self.node.mining_txs_var.value.values())
     
     message_ping = pack.ComposedType([])
     def handle_ping(self):
@@ -239,13 +269,48 @@ class Protocol(p2protocol.Protocol):
             res = failure.Failure("sharereply result: " + result)
         self.get_shares.got_response(id, res)
     
+    
     message_bestblock = pack.ComposedType([
         ('header', bitcoin_data.block_header_type),
     ])
     def handle_bestblock(self, header):
         self.node.handle_bestblock(header, self)
     
+    
+    message_have_tx = pack.ComposedType([
+        ('tx_hashes', pack.ListType(pack.IntType(256))),
+    ])
+    def handle_have_tx(self, tx_hashes):
+        self.remote_tx_hashes.update(tx_hashes)
+    message_losing_tx = pack.ComposedType([
+        ('tx_hashes', pack.ListType(pack.IntType(256))),
+    ])
+    def handle_losing_tx(self, tx_hashes):
+        self.remote_tx_hashes.difference_update(tx_hashes)
+    
+    
+    message_remember_tx = pack.ComposedType([
+        ('tx_hashes', pack.ListType(pack.IntType(256))),
+        ('txs', pack.ListType(bitcoin_data.tx_type)),
+    ])
+    def handle_remember_tx(self, tx_hashes, txs):
+        for tx_hash in tx_hashes:
+            if tx_hash not in self.remembered_txs:
+                self.remembered_txs[tx_hash] = self.node.known_txs_var.value[tx_hash]
+        for tx in txs:
+            tx_hash = bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))
+            if tx_hash not in self.remembered_txs:
+                self.remembered_txs[tx_hash] = tx
+    message_forget_tx = pack.ComposedType([
+        ('tx_hashes', pack.ListType(pack.IntType(256))),
+    ])
+    def handle_forget_tx(self, tx_hashes):
+        for tx_hash in tx_hashes:
+            del self.remembered_txs[tx_hash]
+    
+    
     def connectionLost(self, reason):
+        self.connection_lost_event.happened()
         if self.timeout_delayed is not None:
             self.timeout_delayed.cancel()
         if self.connected2:
@@ -407,10 +472,12 @@ class SingleClientFactory(protocol.ReconnectingClientFactory):
         self.node.lost_conn(proto, reason)
 
 class Node(object):
-    def __init__(self, best_share_hash_func, port, net, addr_store={}, connect_addrs=set(), desired_outgoing_conns=10, max_outgoing_attempts=30, max_incoming_conns=50, preferred_storage=1000, traffic_happened=variable.Event()):
+    def __init__(self, best_share_hash_func, port, net, known_txs_var=variable.Variable({}), mining_txs_var=variable.Variable({}), addr_store={}, connect_addrs=set(), desired_outgoing_conns=10, max_outgoing_attempts=30, max_incoming_conns=50, preferred_storage=1000, traffic_happened=variable.Event()):
         self.best_share_hash_func = best_share_hash_func
         self.port = port
         self.net = net
+        self.known_txs_var = known_txs_var
+        self.mining_txs_var = mining_txs_var
         self.addr_store = dict(addr_store)
         self.connect_addrs = connect_addrs
         self.preferred_storage = preferred_storage