Incremented versions to 11: transaction_hash_refs is now a flat list of VarInt (share_count, tx_count) pairs, generate_transaction takes (tx_hash, fee) pairs plus an optional base_subsidy so the subsidy can be recomputed from fees, and get_share verifies the reconstructed header.
diff --git a/p2pool/data.py b/p2pool/data.py
index 622b76f..f7b6218 100644
--- a/p2pool/data.py
+++ b/p2pool/data.py
@@ -76,10 +76,7 @@ class Share(object):
             ('desired_version', pack.VarIntType()),
         ])),
         ('new_transaction_hashes', pack.ListType(pack.IntType(256))),
-        ('transaction_hash_refs', pack.ListType(pack.ComposedType([ # compressed by referencing previous shares' hashes
-            ('share_count', pack.VarIntType()),
-            ('tx_count', pack.VarIntType()),
-        ]))),
+        ('transaction_hash_refs', pack.ListType(pack.VarIntType(), 2)), # pairs of share_count, tx_count
         ('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
         ('max_bits', bitcoin_data.FloatingIntegerType()),
         ('bits', bitcoin_data.FloatingIntegerType()),
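
The replacement type stores the refs as one flat list of VarInts rather than a list of two-field structures: each consecutive pair is (share_count, tx_count), and the second argument to pack.ListType is taken here to be a length multiplier so the list always decodes in whole pairs (an assumption about pack.ListType, which is not shown in this diff). A minimal sketch of the flattening, in plain Python rather than p2pool's pack module:

    # Plain-Python sketch of the new wire shape (illustrative only, no
    # p2pool pack types): each ref becomes two adjacent integers.
    def flatten_refs(pairs):
        flat = []
        for share_count, tx_count in pairs:
            flat.extend([share_count, tx_count])
        return flat

    assert flatten_refs([(0, 3), (1, 0)]) == [0, 3, 1, 0]
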
@@ -109,7 +106,7 @@ class Share(object):
     gentx_before_refhash = pack.VarStrType().pack(DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack('\x24' + pack.IntType(256).pack(0) + pack.IntType(32).pack(0))[:2]
     
     @classmethod
-    def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, desired_other_transaction_hashes, net, known_txs=None, last_txout_nonce=0):
+    def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, desired_other_transaction_hashes_and_fees, net, known_txs=None, last_txout_nonce=0, base_subsidy=None):
         previous_share = tracker.items[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
         
         height, last = tracker.get_height_and_last(share_data['previous_share_hash'])
@@ -124,6 +121,40 @@ class Share(object):
         max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
         bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//10, pre_target3)))
         
+        new_transaction_hashes = []
+        new_transaction_size = 0
+        transaction_hash_refs = []
+        other_transaction_hashes = []
+        
+        past_shares = list(tracker.get_chain(share_data['previous_share_hash'], min(height, 100)))
+        tx_hash_to_this = {}
+        for i, share in enumerate(past_shares):
+            for j, tx_hash in enumerate(share.new_transaction_hashes):
+                if tx_hash not in tx_hash_to_this:
+                    tx_hash_to_this[tx_hash] = [1+i, j] # share_count, tx_count
+        for tx_hash, fee in desired_other_transaction_hashes_and_fees:
+            if tx_hash in tx_hash_to_this:
+                this = tx_hash_to_this[tx_hash]
+            else:
+                if known_txs is not None:
+                    this_size = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
+                    if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share
+                        break
+                    new_transaction_size += this_size
+                new_transaction_hashes.append(tx_hash)
+                this = [0, len(new_transaction_hashes)-1]
+            transaction_hash_refs.extend(this)
+            other_transaction_hashes.append(tx_hash)
+        
+        included_transactions = set(other_transaction_hashes)
+        removed_fees = [fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash not in included_transactions]
+        definite_fees = sum(0 if fee is None else fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash in included_transactions)
+        if None not in removed_fees:
+            share_data = dict(share_data, subsidy=share_data['subsidy'] - sum(removed_fees))
+        else:
+            assert base_subsidy is not None
+            share_data = dict(share_data, subsidy=base_subsidy + definite_fees)
+        
         weights, total_weight, donation_weight = tracker.get_cumulative_weights(share_data['previous_share_hash'],
             min(height, net.REAL_CHAIN_LENGTH),
             65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
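
The rewritten selection pass builds a hash -> (share_count, tx_count) index over up to 100 ancestor shares once, so each desired transaction costs a dictionary lookup instead of a rescan of the chain; new transactions are capped at 50 kB per share, and the subsidy is recomputed from the fees of the transactions actually kept. A simplified standalone model of that logic (the function and argument names below are illustrative, not p2pool APIs):

    # Simplified standalone model of the selection and fee accounting.
    # Names (select_transactions, ancestor_new_hashes, tx_size) are
    # illustrative, not p2pool APIs.
    MAX_NEW_TX_BYTES = 50000  # only allow 50 kB of new txns per share

    def select_transactions(desired, ancestor_new_hashes, tx_size, subsidy, base_subsidy):
        # desired: list of (tx_hash, fee_or_None) pairs, best first
        # ancestor_new_hashes: each past share's new_transaction_hashes,
        #   nearest ancestor first
        # tx_size: dict tx_hash -> serialized size in bytes
        index = {}
        for i, hashes in enumerate(ancestor_new_hashes):
            for j, tx_hash in enumerate(hashes):
                index.setdefault(tx_hash, (1 + i, j))  # keep the nearest ref

        refs, included, new_hashes, new_size = [], [], [], 0
        for tx_hash, fee in desired:
            if tx_hash in index:
                ref = index[tx_hash]
            else:
                if new_size + tx_size[tx_hash] > MAX_NEW_TX_BYTES:
                    break  # no room for more new transactions
                new_size += tx_size[tx_hash]
                new_hashes.append(tx_hash)
                ref = (0, len(new_hashes) - 1)
            refs.extend(ref)
            included.append(tx_hash)

        # If every dropped transaction's fee is known, just subtract them;
        # otherwise rebuild the subsidy from the block reward plus the
        # known fees of the transactions that were kept.
        kept = set(included)
        removed_fees = [fee for tx_hash, fee in desired if tx_hash not in kept]
        kept_fees = sum(fee for tx_hash, fee in desired if tx_hash in kept and fee is not None)
        if None not in removed_fees:
            subsidy -= sum(removed_fees)
        else:
            assert base_subsidy is not None
            subsidy = base_subsidy + kept_fees
        return refs, included, new_hashes, subsidy

    # Example: 'txA' was already referenced by the parent share, 'txB' is new.
    refs, kept, new, subsidy = select_transactions(
        [('txA', 1000), ('txB', None)], [['txA']], {'txB': 250}, 5000000000, None)
    assert refs == [1, 0, 0, 0] and new == ['txB'] and subsidy == 5000000000
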
@@ -140,27 +171,6 @@ class Share(object):
         
         dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hit
         
-        new_transaction_hashes = []
-        new_transaction_size = 0
-        transaction_hash_refs = []
-        other_transaction_hashes = []
-        
-        for tx_hash in desired_other_transaction_hashes:
-            for i, share in enumerate(tracker.get_chain(share_data['previous_share_hash'], min(height, 100))):
-                if tx_hash in share.new_transaction_hashes:
-                    this = dict(share_count=i+1, tx_count=share.new_transaction_hashes.index(tx_hash))
-                    break
-            else:
-                if known_txs is not None:
-                    this_size = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
-                    if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share
-                        break
-                    new_transaction_size += this_size
-                new_transaction_hashes.append(tx_hash)
-                this = dict(share_count=0, tx_count=len(new_transaction_hashes)-1)
-            transaction_hash_refs.append(this)
-            other_transaction_hashes.append(tx_hash)
-        
         share_info = dict(
             share_data=share_data,
             far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
@@ -188,9 +198,9 @@ class Share(object):
             lock_time=0,
         )
         
-        def get_share(header, transactions):
-            min_header=dict(header);del min_header['merkle_root']
-            return cls(net, None, dict(
+        def get_share(header, last_txout_nonce=last_txout_nonce):
+            min_header = dict(header); del min_header['merkle_root']
+            share = cls(net, None, dict(
                 min_header=min_header,
                 share_info=share_info,
                 ref_merkle_link=dict(branch=[], index=0),
@@ -198,6 +208,8 @@ class Share(object):
                 hash_link=prefix_to_hash_link(bitcoin_data.tx_type.pack(gentx)[:-32-4-4], cls.gentx_before_refhash),
                 merkle_link=bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0),
             ))
+            assert share.header == header # checks merkle_root
+            return share
         
         return share_info, gentx, other_transaction_hashes, get_share
     
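
The new assertion rebuilds the Share from the caller's header and checks that the header round-trips, which in effect verifies that the header's merkle_root matches the one derived from the generation transaction and the stored merkle_link. What that check amounts to, sketched with plain hashlib (the real code goes through p2pool's bitcoin_data helpers and integer hashes, so this is only illustrative):

    # Illustrative only: fold the generation tx hash up a merkle branch.
    # At index 0 the gentx is the leftmost leaf, so every sibling hash in
    # the branch is concatenated on the right before double-SHA256.
    import hashlib

    def double_sha256(data):
        return hashlib.sha256(hashlib.sha256(data).digest()).digest()

    def merkle_root_from_link(gentx_hash, branch):
        # gentx_hash and the branch entries are raw 32-byte hashes
        h = gentx_hash
        for sibling in branch:
            h = double_sha256(h + sibling)
        return h
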
@@ -237,10 +249,10 @@ class Share(object):
         self.desired_version = self.share_data['desired_version']
         
         n = set()
-        for x in self.share_info['transaction_hash_refs']:
-            assert x['share_count'] < 110
-            if x['share_count'] == 0:
-                n.add(x['tx_count'])
+        for share_count, tx_count in self.iter_transaction_hash_refs():
+            assert share_count < 110
+            if share_count == 0:
+                n.add(tx_count)
         assert n == set(range(len(self.share_info['new_transaction_hashes'])))
         
         self.gentx_hash = check_hash_link(
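
The loop now works on decoded (share_count, tx_count) pairs and enforces the same invariants as before: share_count must stay below 110, and the refs with share_count == 0 must cover every index of new_transaction_hashes, so every newly introduced hash is actually referenced. A tiny illustration with made-up values:

    # Made-up data illustrating the invariant on decoded refs.
    new_transaction_hashes = ['txA', 'txB']
    refs = [(0, 0), (3, 7), (0, 1)]   # flat form: [0, 0, 3, 7, 0, 1]
    assert all(share_count < 110 for share_count, tx_count in refs)
    local = set(tx_count for share_count, tx_count in refs if share_count == 0)
    assert local == set(range(len(new_transaction_hashes)))
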
@@ -272,6 +284,9 @@ class Share(object):
     def as_share(self):
         return dict(type=self.VERSION, contents=self.share_type.pack(self.contents))
     
+    def iter_transaction_hash_refs(self):
+        return zip(self.share_info['transaction_hash_refs'][::2], self.share_info['transaction_hash_refs'][1::2])
+    
     def check(self, tracker):
         from p2pool import p2p
         if self.share_data['previous_share_hash'] is not None:
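
iter_transaction_hash_refs recovers the pairs from the flat list by zipping the even-indexed elements (share counts) with the odd-indexed ones (tx counts). For example, with a made-up flat list:

    # Made-up flat list: decodes to the pairs (0, 3) and (1, 0).
    transaction_hash_refs = [0, 3, 1, 0]
    pairs = zip(transaction_hash_refs[::2], transaction_hash_refs[1::2])
    assert list(pairs) == [(0, 3), (1, 0)]
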
@@ -291,9 +306,9 @@ class Share(object):
             else:
                 raise p2p.PeerMisbehavingError('''%s can't follow %s''' % (type(self).__name__, type(previous_share).__name__))
         
-        other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, x['share_count'])].share_info['new_transaction_hashes'][x['tx_count']] for x in self.share_info['transaction_hash_refs']]
+        other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, share_count)].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
         
-        share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.contents['ref_merkle_link'], other_tx_hashes, self.net, last_txout_nonce=self.contents['last_txout_nonce'])
+        share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.contents['ref_merkle_link'], [(h, None) for h in other_tx_hashes], self.net, last_txout_nonce=self.contents['last_txout_nonce'])
         assert other_tx_hashes2 == other_tx_hashes
         if share_info != self.share_info:
             raise ValueError('share_info invalid')
@@ -306,12 +321,12 @@ class Share(object):
         return gentx # only used by as_block
     
     def get_other_tx_hashes(self, tracker):
-        parents_needed = max(x['share_count'] for x in self.share_info['transaction_hash_refs']) if self.share_info['transaction_hash_refs'] else 0
+        parents_needed = max(share_count for share_count, tx_count in self.iter_transaction_hash_refs()) if self.share_info['transaction_hash_refs'] else 0
         parents = tracker.get_height(self.hash) - 1
         if parents < parents_needed:
             return None
         last_shares = list(tracker.get_chain(self.hash, parents_needed + 1))
-        return [last_shares[x['share_count']].share_info['new_transaction_hashes'][x['tx_count']] for x in self.share_info['transaction_hash_refs']]
+        return [last_shares[share_count].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
     
     def _get_other_txs(self, tracker, known_txs):
         other_tx_hashes = self.get_other_tx_hashes(tracker)
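
get_other_tx_hashes resolves the decoded pairs against the share's own ancestor chain: share_count selects a share out of the last parents_needed + 1 (0 being this share itself), and tx_count indexes into that share's new_transaction_hashes. A standalone sketch, with plain lists standing in for the tracker and the Share objects:

    # Standalone sketch of ref resolution; plain lists stand in for the
    # tracker chain and the shares' new_transaction_hashes.
    def resolve_refs(pairs, chain_new_tx_hashes):
        # chain_new_tx_hashes[0] belongs to this share, [1] to its parent, ...
        return [chain_new_tx_hashes[share_count][tx_count]
            for share_count, tx_count in pairs]

    chain = [['txC'], ['txA', 'txB']]  # this share's new hashes, then its parent's
    assert resolve_refs([(0, 0), (1, 1)], chain) == ['txC', 'txB']
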
@@ -332,7 +347,7 @@ class Share(object):
         
         other_txs = self._get_other_txs(tracker, known_txs)
         if other_txs is None:
-            if self.time_seen is not None: # ignore if loaded from ShareStore
+            if self.time_seen != 0: # ignore if loaded from ShareStore
                 return True, 'not all txs present'
         else:
             all_txs_size = sum(bitcoin_data.tx_type.packed_size(tx) for tx in other_txs)
@@ -521,7 +536,7 @@ class OkayTracker(forest.Tracker):
         block_height = max(block_rel_height_func(share.header['previous_block']) for share in
             self.verified.get_chain(end_point, self.net.CHAIN_LENGTH//16))
         
-        return self.net.CHAIN_LENGTH, self.verified.get_delta(share_hash, end_point).work//((0 - block_height + 1)*self.net.PARENT.BLOCK_PERIOD)
+        return self.net.CHAIN_LENGTH, self.verified.get_delta(share_hash, end_point).work/((0 - block_height + 1)*self.net.PARENT.BLOCK_PERIOD)
 
 def get_pool_attempts_per_second(tracker, previous_share_hash, dist, min_work=False, integer=False):
     assert dist >= 2
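
Replacing // with / turns the work-per-second figure returned here into a true division: the result is a rate and should keep its fractional part rather than being floored. This relies on true division being in effect for the module (data.py imports division from __future__ at the top of the file; that import is outside this hunk). Made-up numbers:

    # Made-up numbers showing why the floored quotient was lossy.
    from __future__ import division
    work, elapsed = 7, 2
    assert work // elapsed == 3    # old behaviour: floored
    assert work / elapsed == 3.5   # new behaviour: true rate
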
@@ -580,7 +595,7 @@ def get_warnings(tracker, best_share, net, bitcoind_warning, bitcoind_work_value
     desired_version_counts = get_desired_version_counts(tracker, best_share,
         min(net.CHAIN_LENGTH, 60*60//net.SHARE_PERIOD, tracker.get_height(best_share)))
     majority_desired_version = max(desired_version_counts, key=lambda k: desired_version_counts[k])
-    if majority_desired_version > Share.VERSION and desired_version_counts[majority_desired_version] > sum(desired_version_counts.itervalues())/2:
+    if majority_desired_version > 11 and desired_version_counts[majority_desired_version] > sum(desired_version_counts.itervalues())/2:
         res.append('A MAJORITY OF SHARES CONTAIN A VOTE FOR AN UNSUPPORTED SHARE IMPLEMENTATION! (v%i with %i%% support)\n'
             'An upgrade is likely necessary. Check http://p2pool.forre.st/ for more information.' % (
                 majority_desired_version, 100*desired_version_counts[majority_desired_version]/sum(desired_version_counts.itervalues())))
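
The warning predicate itself is unchanged: it fires when the most popular desired_version among recent shares is above what this node supports (now compared against the literal 11 rather than Share.VERSION) and that version holds more than half of the vote. A small sketch with made-up counts:

    # Made-up vote counts for the upgrade-warning predicate.
    SUPPORTED_VERSION = 11
    desired_version_counts = {11: 40, 13: 55, 9: 5}
    majority = max(desired_version_counts, key=lambda k: desired_version_counts[k])
    total = sum(desired_version_counts.values())
    assert majority > SUPPORTED_VERSION and desired_version_counts[majority] > total / 2
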