check version on all networks, now that litecoin net has switched
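
This bumps the advertised p2p protocol version from 4 to 1300 and rejects any peer announcing an older one, on every network now that the litecoin net has switched over. A minimal sketch of the handshake check this enforces, using an illustrative MIN_PROTOCOL_VERSION constant (the diff below hard-codes 1300 directly in send_version and handle_version):

    class PeerMisbehavingError(Exception):
        pass

    MIN_PROTOCOL_VERSION = 1300  # illustrative constant; the patch writes 1300 inline

    def check_peer_version(version):
        # mirrors handle_version(): drop peers that speak an older protocol
        if version < MIN_PROTOCOL_VERSION:
            raise PeerMisbehavingError('peer too old')
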
diff --git a/p2pool/p2p.py b/p2pool/p2p.py
index 49a239d..bc61982 100644
@@ -2,6 +2,7 @@ from __future__ import division
 
 import math
 import random
+import sys
 import time
 
 from twisted.internet import defer, protocol, reactor
@@ -18,12 +19,14 @@ class PeerMisbehavingError(Exception):
 
 def fragment(f, **kwargs):
     try:
-        return f(**kwargs)
+        f(**kwargs)
     except p2protocol.TooLong:
-        att(f, **dict((k, v[:len(v)//2]) for k, v in kwargs.iteritems()))
-        return att(f, **dict((k, v[len(v)//2:]) for k, v in kwargs.iteritems()))
+        fragment(f, **dict((k, v[:len(v)//2]) for k, v in kwargs.iteritems()))
+        fragment(f, **dict((k, v[len(v)//2:]) for k, v in kwargs.iteritems()))
 
 class Protocol(p2protocol.Protocol):
+    max_remembered_txs_size = 2500000
+    
     def __init__(self, node, incoming):
         p2protocol.Protocol.__init__(self, node.net.PREFIX, 1000000, node.traffic_happened)
         self.node = node
@@ -33,8 +36,6 @@ class Protocol(p2protocol.Protocol):
         self.connected2 = False
     
     def connectionMade(self):
-        p2protocol.Protocol.connectionMade(self)
-        
         self.factory.proto_made_connection(self)
         
         self.connection_lost_event = variable.Event()
@@ -42,7 +43,7 @@ class Protocol(p2protocol.Protocol):
         self.addr = self.transport.getPeer().host, self.transport.getPeer().port
         
         self.send_version(
-            version=4,
+            version=1300,
             services=0,
             addr_to=dict(
                 services=0,
@@ -66,16 +67,20 @@ class Protocol(p2protocol.Protocol):
             max_id=2**256,
             func=lambda id, hashes, parents, stops: self.send_sharereq(id=id, hashes=hashes, parents=parents, stops=stops),
             timeout=15,
-            on_timeout=self.transport.loseConnection,
+            on_timeout=self.disconnect,
         )
         
         self.remote_tx_hashes = set() # view of peer's known_txs # not actually initially empty, but sending txs instead of tx hashes won't hurt
+        self.remote_remembered_txs_size = 0
+        
         self.remembered_txs = {} # view of peer's mining_txs
+        self.remembered_txs_size = 0
+        self.known_txs_cache = {}
     
     def _connect_timeout(self):
         self.timeout_delayed = None
         print 'Handshake timed out, disconnecting from %s:%i' % self.addr
-        self.transport.loseConnection()
+        self.disconnect()
     
     def packetReceived(self, command, payload2):
         try:
@@ -89,13 +94,14 @@ class Protocol(p2protocol.Protocol):
     def badPeerHappened(self):
         if p2pool.DEBUG:
             print "Bad peer banned:", self.addr
-        self.transport.loseConnection()
-        self.node.bans[self.transport.getPeer().host] = time.time() + 60*60
+        self.disconnect()
+        if self.transport.getPeer().host != '127.0.0.1': # never ban localhost
+            self.node.bans[self.transport.getPeer().host] = time.time() + 60*60
     
     def _timeout(self):
         self.timeout_delayed = None
         print 'Connection timed out, disconnecting from %s:%i' % self.addr
-        self.transport.loseConnection()
+        self.disconnect()
     
     message_version = pack.ComposedType([
         ('version', pack.IntType(32)),
@@ -110,7 +116,7 @@ class Protocol(p2protocol.Protocol):
     def handle_version(self, version, services, addr_to, addr_from, nonce, sub_version, mode, best_share_hash):
         if self.other_version is not None:
             raise PeerMisbehavingError('more than one version message')
-        if version < 4:
+        if version < 1300:
             raise PeerMisbehavingError('peer too old')
         
         self.other_version = version
@@ -122,7 +128,7 @@ class Protocol(p2protocol.Protocol):
         if nonce in self.node.peers:
             if p2pool.DEBUG:
                 print 'Detected duplicate connection, disconnecting from %s:%i' % self.addr
-            self.transport.loseConnection()
+            self.disconnect()
             return
         
         self.nonce = nonce
@@ -145,7 +151,7 @@ class Protocol(p2protocol.Protocol):
         random.expovariate(1/100)][-1])
         
         self._stop_thread2 = deferral.run_repeatedly(lambda: [
-            self.send_addrme(port=self.node.port),
+            self.send_addrme(port=self.node.serverfactory.listen_port.getHost().port) if self.node.serverfactory.listen_port is not None else None,
         random.expovariate(1/(100*len(self.node.peers) + 1))][-1])
         
         if best_share_hash is not None:
@@ -158,7 +164,11 @@ class Protocol(p2protocol.Protocol):
                 self.send_have_tx(tx_hashes=list(added))
             if removed:
                 self.send_losing_tx(tx_hashes=list(removed))
-                # XXX cache locally
+                
+                # cache forgotten txs here for a little while so latency of "losing_tx" packets doesn't cause problems
+                key = max(self.known_txs_cache) + 1 if self.known_txs_cache else 0
+                self.known_txs_cache[key] = dict((h, before[h]) for h in removed)
+                reactor.callLater(20, self.known_txs_cache.pop, key)
         watch_id = self.node.known_txs_var.transitioned.watch(update_remote_view_of_my_known_txs)
         self.connection_lost_event.watch(lambda: self.node.known_txs_var.transitioned.unwatch(watch_id))
         
@@ -168,12 +178,17 @@ class Protocol(p2protocol.Protocol):
             added = set(after) - set(before)
             removed = set(before) - set(after)
             if added:
+                self.remote_remembered_txs_size += sum(100 + bitcoin_data.tx_type.packed_size(after[x]) for x in added)
+                assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
                 fragment(self.send_remember_tx, tx_hashes=[x for x in added if x in self.remote_tx_hashes], txs=[after[x] for x in added if x not in self.remote_tx_hashes])
             if removed:
-                self.send_forget_tx(tx_hashes=removed)
+                self.send_forget_tx(tx_hashes=list(removed))
+                self.remote_remembered_txs_size -= sum(100 + bitcoin_data.tx_type.packed_size(before[x]) for x in removed)
         watch_id2 = self.node.mining_txs_var.transitioned.watch(update_remote_view_of_my_mining_txs)
         self.connection_lost_event.watch(lambda: self.node.mining_txs_var.transitioned.unwatch(watch_id2))
         
+        self.remote_remembered_txs_size += sum(100 + bitcoin_data.tx_type.packed_size(x) for x in self.node.mining_txs_var.value.values())
+        assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
         fragment(self.send_remember_tx, tx_hashes=[], txs=self.node.mining_txs_var.value.values())
     
     message_ping = pack.ComposedType([])
@@ -237,24 +252,62 @@ class Protocol(p2protocol.Protocol):
         ('shares', pack.ListType(p2pool_data.share_type)),
     ])
     def handle_shares(self, shares):
-        self.node.handle_shares([p2pool_data.load_share(share, self.node.net, self) for share in shares if share['type'] not in [6, 7]], self)
+        result = []
+        for wrappedshare in shares:
+            if wrappedshare['type'] < 9: continue
+            share = p2pool_data.load_share(wrappedshare, self.node.net, self.addr)
+            if wrappedshare['type'] >= 13:
+                txs = []
+                for tx_hash in share.share_info['new_transaction_hashes']:
+                    if tx_hash in self.node.known_txs_var.value:
+                        tx = self.node.known_txs_var.value[tx_hash]
+                    else:
+                        for cache in self.known_txs_cache.itervalues():
+                            if tx_hash in cache:
+                                tx = cache[tx_hash]
+                                print 'Transaction %064x rescued from peer latency cache!' % (tx_hash,)
+                                break
+                        else:
+                            print >>sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % (tx_hash,)
+                            self.disconnect()
+                            return
+                    txs.append(tx)
+            else:
+                txs = None
+            
+            result.append((share, txs))
+            
+        self.node.handle_shares(result, self)
     
     def sendShares(self, shares, tracker, known_txs, include_txs_with=[]):
-        if not shares:
-            return defer.succeed(None)
-        
         tx_hashes = set()
         for share in shares:
+            if share.VERSION >= 13:
+                # send full transaction for every new_transaction_hash that peer does not know
+                for tx_hash in share.share_info['new_transaction_hashes']:
+                    assert tx_hash in known_txs, 'tried to broadcast share without knowing all its new transactions'
+                    if tx_hash not in self.remote_tx_hashes:
+                        tx_hashes.add(tx_hash)
+                continue
             if share.hash in include_txs_with:
-                tx_hashes.update(share.get_other_tx_hashes(tracker))
+                x = share.get_other_tx_hashes(tracker)
+                if x is not None:
+                    tx_hashes.update(x)
         
-        hashes_to_send = [x for x in tx_hashes if x not in self.node.mining_txs_var.value]
+        hashes_to_send = [x for x in tx_hashes if x not in self.node.mining_txs_var.value and x in known_txs]
+        
+        new_remote_remembered_txs_size = self.remote_remembered_txs_size + sum(100 + bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)
+        if new_remote_remembered_txs_size > self.max_remembered_txs_size:
+            raise ValueError('shares have too many txs')
+        self.remote_remembered_txs_size = new_remote_remembered_txs_size
+        
+        fragment(self.send_remember_tx, tx_hashes=[x for x in hashes_to_send if x in self.remote_tx_hashes], txs=[known_txs[x] for x in hashes_to_send if x not in self.remote_tx_hashes])
         
-        fragment(self.send_remember_tx, tx_hashes=[x for x in hashes_to_send if x in self.remote_tx_hashes], txs=[known_txs[x] for x in hashes_to_send if x not in self.remote_tx_hashes and x in known_txs]) # last one required?
         fragment(self.send_shares, shares=[share.as_share() for share in shares])
-        res = self.send_forget_tx(tx_hashes=hashes_to_send)
         
-        return res
+        self.send_forget_tx(tx_hashes=hashes_to_send)
+        
+        self.remote_remembered_txs_size -= sum(100 + bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)
     
     
     message_sharereq = pack.ComposedType([
@@ -277,7 +330,7 @@ class Protocol(p2protocol.Protocol):
     ])
     def handle_sharereply(self, id, result, shares):
         if result == 'good':
-            res = [p2pool_data.load_share(share, self.node.net, self) for share in shares if share['type'] not in [6, 7]]
+            res = [p2pool_data.load_share(share, self.node.net, self.addr) for share in shares if share['type'] >= 9]
         else:
             res = failure.Failure("sharereply result: " + result)
         self.get_shares.got_response(id, res)
@@ -294,11 +347,15 @@ class Protocol(p2protocol.Protocol):
         ('tx_hashes', pack.ListType(pack.IntType(256))),
     ])
     def handle_have_tx(self, tx_hashes):
+        #assert self.remote_tx_hashes.isdisjoint(tx_hashes)
         self.remote_tx_hashes.update(tx_hashes)
+        while len(self.remote_tx_hashes) > 10000:
+            self.remote_tx_hashes.pop()
     message_losing_tx = pack.ComposedType([
         ('tx_hashes', pack.ListType(pack.IntType(256))),
     ])
     def handle_losing_tx(self, tx_hashes):
+        #assert self.remote_tx_hashes.issuperset(tx_hashes)
         self.remote_tx_hashes.difference_update(tx_hashes)
     
     
@@ -308,20 +365,52 @@ class Protocol(p2protocol.Protocol):
     ])
     def handle_remember_tx(self, tx_hashes, txs):
         for tx_hash in tx_hashes:
-            if tx_hash not in self.remembered_txs:
-                self.remembered_txs[tx_hash] = self.node.known_txs_var.value[tx_hash]
+            if tx_hash in self.remembered_txs:
+                print >>sys.stderr, 'Peer referenced transaction twice, disconnecting'
+                self.disconnect()
+                return
+            
+            if tx_hash in self.node.known_txs_var.value:
+                tx = self.node.known_txs_var.value[tx_hash]
+            else:
+                for cache in self.known_txs_cache.itervalues():
+                    if tx_hash in cache:
+                        tx = cache[tx_hash]
+                        print 'Transaction %064x rescued from peer latency cache!' % (tx_hash,)
+                        break
+                else:
+                    print >>sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % (tx_hash,)
+                    self.disconnect()
+                    return
+            
+            self.remembered_txs[tx_hash] = tx
+            self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size(tx)
         new_known_txs = dict(self.node.known_txs_var.value)
+        warned = False
         for tx in txs:
             tx_hash = bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))
-            if tx_hash not in self.remembered_txs:
-                self.remembered_txs[tx_hash] = tx
+            if tx_hash in self.remembered_txs:
+                print >>sys.stderr, 'Peer referenced transaction twice, disconnecting'
+                self.disconnect()
+                return
+            
+            if tx_hash in self.node.known_txs_var.value and not warned:
+                print 'Peer sent entire transaction %064x that was already received' % (tx_hash,)
+                warned = True
+            
+            self.remembered_txs[tx_hash] = tx
+            self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size(tx)
             new_known_txs[tx_hash] = tx
         self.node.known_txs_var.set(new_known_txs)
+        if self.remembered_txs_size >= self.max_remembered_txs_size:
+            raise PeerMisbehavingError('too much transaction data stored')
     message_forget_tx = pack.ComposedType([
         ('tx_hashes', pack.ListType(pack.IntType(256))),
     ])
     def handle_forget_tx(self, tx_hashes):
         for tx_hash in tx_hashes:
+            self.remembered_txs_size -= 100 + bitcoin_data.tx_type.packed_size(self.remembered_txs[tx_hash])
+            assert self.remembered_txs_size >= 0
             del self.remembered_txs[tx_hash]
     
     
@@ -353,6 +442,7 @@ class ServerFactory(protocol.ServerFactory):
         
         self.conns = {}
         self.running = False
+        self.listen_port = None
     
     def buildProtocol(self, addr):
         if sum(self.conns.itervalues()) >= self.max_conns or self.conns.get(self._host_to_ident(addr.host), 0) >= 3:
@@ -488,17 +578,17 @@ class SingleClientFactory(protocol.ReconnectingClientFactory):
         self.node.lost_conn(proto, reason)
 
 class Node(object):
-    def __init__(self, best_share_hash_func, port, net, addr_store={}, connect_addrs=set(), desired_outgoing_conns=10, max_outgoing_attempts=30, max_incoming_conns=50, preferred_storage=1000, traffic_happened=variable.Event(), known_txs_var=variable.Variable({}), mining_txs_var=variable.Variable({})):
+    def __init__(self, best_share_hash_func, port, net, addr_store={}, connect_addrs=set(), desired_outgoing_conns=10, max_outgoing_attempts=30, max_incoming_conns=50, preferred_storage=1000, known_txs_var=variable.Variable({}), mining_txs_var=variable.Variable({})):
         self.best_share_hash_func = best_share_hash_func
         self.port = port
         self.net = net
         self.addr_store = dict(addr_store)
         self.connect_addrs = connect_addrs
         self.preferred_storage = preferred_storage
-        self.traffic_happened = traffic_happened
         self.known_txs_var = known_txs_var
         self.mining_txs_var = mining_txs_var
         
+        self.traffic_happened = variable.Event()
         self.nonce = random.randrange(2**64)
         self.peers = {}
         self.bans = {} # address -> end_time
@@ -564,7 +654,8 @@ class Node(object):
             old_services, old_first_seen, old_last_seen = self.addr_store[host, port]
             self.addr_store[host, port] = services, old_first_seen, max(old_last_seen, timestamp)
         else:
-            self.addr_store[host, port] = services, timestamp, timestamp
+            if len(self.addr_store) < 10000:
+                self.addr_store[host, port] = services, timestamp, timestamp
     
     def handle_shares(self, shares, peer):
         print 'handle_shares', (shares, peer)