store shares to disk and load them on startup
author: Forrest Voight <forrest@forre.st>
Thu, 11 Aug 2011 06:09:10 +0000 (02:09 -0400)
committer: Forrest Voight <forrest@forre.st>
Thu, 11 Aug 2011 06:09:10 +0000 (02:09 -0400)
p2pool/data.py
p2pool/main.py

index 50275d7..6471d6a 100644 (file)
@@ -108,7 +108,7 @@ class Share(object):
     def from_share1b(cls, share1b):
         return cls(**share1b)
     
-    __slots__ = 'header previous_block share_info merkle_branch other_txs timestamp share_data new_script subsidy previous_hash previous_share_hash target nonce bitcoin_hash hash time_seen shared peer'.split(' ')
+    __slots__ = 'header previous_block share_info merkle_branch other_txs timestamp share_data new_script subsidy previous_hash previous_share_hash target nonce bitcoin_hash hash time_seen shared stored peer'.split(' ')
     
     def __init__(self, header, share_info, merkle_branch=None, other_txs=None):
         if merkle_branch is None and other_txs is None:
@@ -159,6 +159,7 @@ class Share(object):
         # XXX eww
         self.time_seen = time.time()
         self.shared = False
+        self.stored = False
         self.peer = None
     
     def as_block(self, tracker, net):
@@ -444,6 +445,38 @@ def format_hash(x):
         return 'xxxxxxxx'
     return '%08x' % (x % 2**32)
 
class ShareStore(object):
    """Persists shares to a flat file on disk and replays them on startup.

    Each line of the file is "<type_id> <hex-encoded packed share>", where
    type_id 0 is a share1a and type_id 1 is a share1b (a share whose
    proof-of-work also satisfied the bitcoin block target).
    """
    
    def __init__(self, filename, net):
        # path of the backing file; created lazily by get_shares/add_share
        self.filename = filename
        self.net = net
    
    def get_shares(self):
        """Yield every Share stored in the file, skipping unreadable lines.

        A torn or corrupt line (e.g. from a crash mid-write) is logged and
        skipped so the rest of the file is still recovered.
        """
        open(self.filename, 'a').close() # make empty file if it doesn't exist
        
        with open(self.filename) as f:
            for line in f:
                try:
                    type_id_str, data_hex = line.strip().split(' ')
                    type_id, data = int(type_id_str), data_hex.decode('hex')
                    if type_id == 0:
                        share = Share.from_share1a(share1a_type.unpack(data))
                    elif type_id == 1:
                        share = Share.from_share1b(share1b_type.unpack(data))
                    else:
                        raise NotImplementedError("share type %i" % (type_id,))
                    yield share
                except Exception:
                    log.err(None, "Error while reading saved shares, continuing where left off:")
    
    def add_share(self, share):
        """Append one share to the store.

        Shares that also solve a bitcoin block are stored as share1b
        (type 1) so the full block contents survive a restart; all others
        as share1a (type 0).
        """
        if share.bitcoin_hash <= share.header['target']:
            type_id, data = 1, share1b_type.pack(share.as_share1b())
        else:
            type_id, data = 0, share1a_type.pack(share.as_share1a())
        # 'with' guarantees the handle is closed (and the line flushed) even
        # if the write raises; the original bare open/close leaked it on error
        with open(self.filename, 'a') as f:
            f.write("%i %s\n" % (type_id, data.encode('hex')))
+
 class Mainnet(bitcoin_data.Mainnet):
     SHARE_PERIOD = 5 # seconds
     CHAIN_LENGTH = 24*60*60//5 # shares
@@ -456,6 +489,7 @@ class Mainnet(bitcoin_data.Mainnet):
     P2P_PORT = 9333
     MAX_TARGET = 2**256//2**32 - 1
     PERSIST = True
+    SHARESTORE_FILENAME = 'shares.dat'
     HEADERSTORE_FILENAME = 'headers.dat'
 
 class Testnet(bitcoin_data.Testnet):
@@ -470,4 +504,5 @@ class Testnet(bitcoin_data.Testnet):
     P2P_PORT = 19333
     MAX_TARGET = 2**256//2**20 - 1
     PERSIST = False
+    SHARESTORE_FILENAME = 'testnet_shares.dat'
     HEADERSTORE_FILENAME = 'testnet_headers.dat'
index 147c2af..b6ab585 100644 (file)
@@ -93,9 +93,19 @@ def main(args):
         print
         
         tracker = p2pool.OkayTracker(args.net)
-        chains = expiring_dict.ExpiringDict(300)
-        def get_chain(chain_id_data):
-            return chains.setdefault(chain_id_data, Chain(chain_id_data))
+        ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
+        print "Loading shares..."
+        for i, share in enumerate(ss.get_shares()):
+            if share.hash in tracker.shares:
+                continue
+            share.shared = True
+            share.stored = True
+            tracker.add(share, known_verified=True)
+            if len(tracker.shares) % 1000 == 0 and tracker.shares:
+                print "    %i" % (len(tracker.shares),)
+        print "    ...done!"
+        print
+        tracker.verified.added.watch(ss.add_share)
         
         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it