to reduce memory usage, keep tx pointers in interleaved list instead of each in its...
[p2pool.git] / p2pool / data.py
index a68eedf..56b3a2f 100644
@@ -76,10 +76,7 @@ class Share(object):
             ('desired_version', pack.VarIntType()),
         ])),
         ('new_transaction_hashes', pack.ListType(pack.IntType(256))),
-        ('transaction_hash_refs', pack.ListType(pack.ComposedType([ # compressed by referencing previous shares' hashes
-            ('share_count', pack.VarIntType()),
-            ('tx_count', pack.VarIntType()),
-        ]))),
+        ('transaction_hash_refs', pack.ListType(pack.VarIntType(), 2)), # pairs of share_count, tx_count
         ('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
         ('max_bits', bitcoin_data.FloatingIntegerType()),
         ('bits', bitcoin_data.FloatingIntegerType()),
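
Aside: the old format packed every reference as its own two-field ComposedType, so each one became a separate dict in memory; the new format is a single flat list of varints in which consecutive values form (share_count, tx_count) pairs. share_count 0 points into this share's own new_transaction_hashes, while share_count k (k >= 1) points into the k-th ancestor's. A minimal sketch of the two layouts, with made-up values rather than real p2pool objects:

    refs_old = [dict(share_count=1, tx_count=4), dict(share_count=0, tx_count=0)]  # one dict per reference
    refs_new = [1, 4, 0, 0]  # same information, interleaved: share_count, tx_count, share_count, tx_count, ...
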
@@ -145,11 +142,15 @@ class Share(object):
         transaction_hash_refs = []
         other_transaction_hashes = []
         
+        past_shares = list(tracker.get_chain(share_data['previous_share_hash'], min(height, 100)))
+        tx_hash_to_this = {}
+        for i, share in enumerate(past_shares):
+            for j, tx_hash in enumerate(share.new_transaction_hashes):
+                if tx_hash not in tx_hash_to_this:
+                    tx_hash_to_this[tx_hash] = [1+i, j] # share_count, tx_count
         for tx_hash in desired_other_transaction_hashes:
-            for i, share in enumerate(tracker.get_chain(share_data['previous_share_hash'], min(height, 100))):
-                if tx_hash in share.new_transaction_hashes:
-                    this = dict(share_count=i+1, tx_count=share.new_transaction_hashes.index(tx_hash))
-                    break
+            if tx_hash in tx_hash_to_this:
+                this = tx_hash_to_this[tx_hash]
             else:
                 if known_txs is not None:
                     this_size = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
@@ -157,8 +158,8 @@ class Share(object):
                         break
                     new_transaction_size += this_size
                 new_transaction_hashes.append(tx_hash)
-                this = dict(share_count=0, tx_count=len(new_transaction_hashes)-1)
-            transaction_hash_refs.append(this)
+                this = [0, len(new_transaction_hashes)-1]
+            transaction_hash_refs.extend(this)
             other_transaction_hashes.append(tx_hash)
         
         share_info = dict(
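
Aside: the old inner loop rescanned up to 100 ancestor shares for every desired transaction hash, doing a linear membership test and index() on each share's new_transaction_hashes; the new code builds the reverse index once, so each hash becomes a single dict lookup. A rough sketch of the idea with made-up hashes instead of real share objects:

    past = [['aa', 'bb'], ['cc']]              # new_transaction_hashes of ancestors, closest first
    tx_hash_to_this = {}
    for i, hashes in enumerate(past):
        for j, h in enumerate(hashes):
            if h not in tx_hash_to_this:       # keep the closest ancestor, as the old break did
                tx_hash_to_this[h] = [1+i, j]  # share_count (1-based), tx_count
    assert tx_hash_to_this == {'aa': [1, 0], 'bb': [1, 1], 'cc': [2, 0]}
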
@@ -236,10 +237,12 @@ class Share(object):
         self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
         self.desired_version = self.share_data['desired_version']
         
-        for x in self.share_info['transaction_hash_refs']:
-            assert x['share_count'] < 110
-        for i, x in enumerate(self.share_info['new_transaction_hashes']):
-            assert dict(share_count=0, tx_count=i) in self.share_info['transaction_hash_refs']
+        n = set()
+        for share_count, tx_count in self.iter_transaction_hash_refs():
+            assert share_count < 110
+            if share_count == 0:
+                n.add(tx_count)
+        assert n == set(range(len(self.share_info['new_transaction_hashes'])))
         
         self.gentx_hash = check_hash_link(
             self.hash_link,
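
Aside: the sanity check is rewritten to match. Instead of testing dict(share_count=0, tx_count=i) in transaction_hash_refs for every new hash (a linear scan per hash), it collects the tx_count of every share_count == 0 pair and compares that set against range(len(new_transaction_hashes)) in one pass, which also rejects out-of-range indices. A small sketch with made-up data:

    new_transaction_hashes = ['h0', 'h1', 'h2']
    ref_pairs = [(0, 0), (1, 3), (0, 1), (0, 2)]          # already-decoded (share_count, tx_count) pairs
    n = set(tx_count for share_count, tx_count in ref_pairs if share_count == 0)
    assert n == set(range(len(new_transaction_hashes)))   # every own tx is referenced, nothing out of range
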
@@ -270,6 +273,9 @@ class Share(object):
     def as_share(self):
         return dict(type=self.VERSION, contents=self.share_type.pack(self.contents))
     
+    def iter_transaction_hash_refs(self):
+        return zip(self.share_info['transaction_hash_refs'][::2], self.share_info['transaction_hash_refs'][1::2])
+    
     def check(self, tracker):
         from p2pool import p2p
         if self.share_data['previous_share_hash'] is not None:
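
Aside: this helper is the one place the flat list is turned back into pairs; [::2] picks out the share_counts, [1::2] the tx_counts, and zip pairs them up again. A hypothetical usage sketch (the fake class is illustration only, not p2pool code):

    class _FakeShare(object):
        share_info = dict(transaction_hash_refs=[0, 0, 1, 3, 0, 1])
        def iter_transaction_hash_refs(self):
            return zip(self.share_info['transaction_hash_refs'][::2], self.share_info['transaction_hash_refs'][1::2])

    assert list(_FakeShare().iter_transaction_hash_refs()) == [(0, 0), (1, 3), (0, 1)]

Under Python 2, zip builds the whole list of pairs up front rather than a lazy iterator; for the short ref lists involved that is fine.
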
@@ -289,7 +295,7 @@ class Share(object):
             else:
                 raise p2p.PeerMisbehavingError('''%s can't follow %s''' % (type(self).__name__, type(previous_share).__name__))
         
-        other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, x['share_count'])].share_info['new_transaction_hashes'][x['tx_count']] for x in self.share_info['transaction_hash_refs']]
+        other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, share_count)].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
         
         share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.contents['ref_merkle_link'], other_tx_hashes, self.net, last_txout_nonce=self.contents['last_txout_nonce'])
         assert other_tx_hashes2 == other_tx_hashes
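
Aside: the meaning of a reference is unchanged in check(): (share_count, tx_count) still means "the tx_count-th entry of new_transaction_hashes in the share share_count steps behind this one", with share_count 0 being the share itself; it is just read out of the interleaved list now. A toy sketch of that resolution rule, with made-up hashes and no tracker:

    new_hashes_by_depth = [['me0'], ['p1a', 'p1b'], ['p2a']]   # index 0 = this share, index k = k-th parent
    def resolve(share_count, tx_count):
        return new_hashes_by_depth[share_count][tx_count]

    assert resolve(0, 0) == 'me0'
    assert resolve(1, 1) == 'p1b'
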
@@ -304,10 +310,12 @@ class Share(object):
         return gentx # only used by as_block
     
     def get_other_tx_hashes(self, tracker):
+        parents_needed = max(share_count for share_count, tx_count in self.iter_transaction_hash_refs()) if self.share_info['transaction_hash_refs'] else 0
         parents = tracker.get_height(self.hash) - 1
-        if not all(x['share_count'] <= parents for x in self.share_info['transaction_hash_refs']):
+        if parents < parents_needed:
             return None
-        return [tracker.items[tracker.get_nth_parent_hash(self.hash, x['share_count'])].share_info['new_transaction_hashes'][x['tx_count']] for x in self.share_info['transaction_hash_refs']]
+        last_shares = list(tracker.get_chain(self.hash, parents_needed + 1))
+        return [last_shares[share_count].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
     
     def _get_other_txs(self, tracker, known_txs):
         other_tx_hashes = self.get_other_tx_hashes(tracker)
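
Aside: get_other_tx_hashes now works out how far back the references actually reach, returns None early if the tracker does not hold that many parents, and then walks the chain once with get_chain instead of calling get_nth_parent_hash per reference (last_shares[0] is the share itself, last_shares[k] its k-th parent). A sketch of the bookkeeping with made-up numbers:

    ref_pairs = [(0, 5), (2, 0), (7, 1)]                                   # decoded (share_count, tx_count) pairs
    parents_needed = max(sc for sc, tc in ref_pairs) if ref_pairs else 0   # 7
    parents = 4                                                            # tracker height minus one
    assert parents < parents_needed                                        # so the method would return None here
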
@@ -328,7 +336,7 @@ class Share(object):
         
         other_txs = self._get_other_txs(tracker, known_txs)
         if other_txs is None:
-            if self.time_seen is not None: # ignore if loaded from ShareStore
+            if self.time_seen != 0: # ignore if loaded from ShareStore
                 return True, 'not all txs present'
         else:
             all_txs_size = sum(bitcoin_data.tx_type.packed_size(tx) for tx in other_txs)
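
Aside: the gate above keeps its old meaning under the new time_seen convention: only shares that arrived from a peer (time_seen set from time.time()) can be punished for missing transactions, while shares read back from the ShareStore are skipped; the comment implies those now carry time_seen == 0 instead of None, though that assignment is not part of this hunk. A tiny sketch of the intended check:

    time_seen = 0                    # assumed value for a share loaded from ShareStore
    punishable = (time_seen != 0)    # only peer-received shares are punishable for missing txs
    assert not punishable
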