removed support for old shares
author Forrest Voight <forrest@forre.st>
Sun, 1 Apr 2012 00:48:25 +0000 (20:48 -0400)
committer Forrest Voight <forrest@forre.st>
Thu, 5 Apr 2012 19:03:40 +0000 (15:03 -0400)
p2pool/data.py
p2pool/main.py
p2pool/web.py

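For readability, this is how the patched load_share in p2pool/data.py reads once the hunks below are applied: the old wire formats (types 0 through 3) are rejected as obsolete, and types 4 and 5, formerly handled by the now-removed NewShare class, are decoded straight into Share. This is a sketch assembled from the diff below, not new behavior; Share, bitcoin_data, and p2p are p2pool's own modules.

# Post-commit shape of load_share (assembled from the diff below; p2pool internals assumed)
def load_share(share, net, peer):
    if share['type'] in [0, 1, 2, 3]:
        # old-style shares are no longer accepted at all
        from p2pool import p2p
        raise p2p.PeerMisbehavingError('sent an obsolete share')
    elif share['type'] == 4:  # share1a: no transaction list attached
        return Share(net, peer, other_txs=None, **Share.share1a_type.unpack(share['contents']))
    elif share['type'] == 5:  # share1b: carries the block's other transactions
        share1b = Share.share1b_type.unpack(share['contents'])
        merkle_link = bitcoin_data.calculate_merkle_link(
            [0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in share1b['other_txs']], 0)
        return Share(net, peer, merkle_link=merkle_link, **share1b)
    else:
        raise ValueError('unknown share type: %r' % (share['type'],))
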
diff --git a/p2pool/data.py b/p2pool/data.py
index 2655f77..633ce6b 100644
@@ -43,19 +43,14 @@ share_type = pack.ComposedType([
 ])
 
 def load_share(share, net, peer):
-    if share['type'] in [0, 1]:
+    if share['type'] in [0, 1, 2, 3]:
         from p2pool import p2p
         raise p2p.PeerMisbehavingError('sent an obsolete share')
-    elif share['type'] == 2:
+    elif share['type'] == 4:
         return Share(net, peer, other_txs=None, **Share.share1a_type.unpack(share['contents']))
-    elif share['type'] == 3:
+    elif share['type'] == 5:
         share1b = Share.share1b_type.unpack(share['contents'])
         return Share(net, peer, merkle_link=bitcoin_data.calculate_merkle_link([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in share1b['other_txs']], 0), **share1b)
-    elif share['type'] == 4:
-        return NewShare(net, peer, other_txs=None, **NewShare.share1a_type.unpack(share['contents']))
-    elif share['type'] == 5:
-        share1b = NewShare.share1b_type.unpack(share['contents'])
-        return NewShare(net, peer, merkle_link=bitcoin_data.calculate_merkle_link([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in share1b['other_txs']], 0), **share1b)
     else:
         raise ValueError('unknown share type: %r' % (share['type'],))
 
@@ -78,193 +73,6 @@ class Share(object):
         ('subsidy', pack.IntType(64)),
         ('donation', pack.IntType(16)),
         ('stale_info', pack.IntType(8)), # 0 nothing, 253 orphan, 254 doa
-    ])
-    
-    share_info_type = pack.ComposedType([
-        ('share_data', share_data_type),
-        ('max_bits', bitcoin_data.FloatingIntegerType()),
-        ('bits', bitcoin_data.FloatingIntegerType()),
-        ('timestamp', pack.IntType(32)),
-    ])
-    
-    share1a_type = pack.ComposedType([
-        ('min_header', small_block_header_type),
-        ('share_info', share_info_type),
-        ('hash_link', hash_link_type),
-        ('merkle_link', pack.ComposedType([
-            ('branch', pack.ListType(pack.IntType(256))),
-            ('index', pack.IntType(0)), # it will always be 0
-        ])),
-    ])
-    
-    share1b_type = pack.ComposedType([
-        ('min_header', small_block_header_type),
-        ('share_info', share_info_type),
-        ('hash_link', hash_link_type),
-        ('other_txs', pack.ListType(bitcoin_data.tx_type)),
-    ])
-    
-    ref_type = pack.ComposedType([
-        ('identifier', pack.FixedStrType(64//8)),
-        ('share_info', share_info_type),
-    ])
-    
-    gentx_before_refhash = pack.VarStrType().pack(DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack('\x20' + pack.IntType(256).pack(0))[:2]
-    
-    @classmethod
-    def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, net):
-        previous_share = tracker.shares[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
-        
-        height, last = tracker.get_height_and_last(share_data['previous_share_hash'])
-        assert height >= net.REAL_CHAIN_LENGTH or last is None
-        if height < net.TARGET_LOOKBEHIND:
-            pre_target3 = net.MAX_TARGET
-        else:
-            attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True)
-            pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 if attempts_per_second else 2**256-1
-            pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10))
-            pre_target3 = math.clip(pre_target2, (0, net.MAX_TARGET))
-        max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
-        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//10, pre_target3)))
-        
-        weights, total_weight, donation_weight = tracker.get_cumulative_weights(share_data['previous_share_hash'],
-            min(height, net.REAL_CHAIN_LENGTH),
-            65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
-            True,
-        )
-        assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
-        
-        amounts = dict((script, share_data['subsidy']*(199*weight)//(200*total_weight)) for script, weight in weights.iteritems()) # 99.5% goes according to weights prior to this share
-        this_script = bitcoin_data.pubkey_hash_to_script2(share_data['pubkey_hash'])
-        amounts[this_script] = amounts.get(this_script, 0) + share_data['subsidy']//200 # 0.5% goes to block finder
-        amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + share_data['subsidy'] - sum(amounts.itervalues()) # all that's left over is the donation weight and some extra satoshis due to rounding
-        
-        if sum(amounts.itervalues()) != share_data['subsidy'] or any(x < 0 for x in amounts.itervalues()):
-            raise ValueError()
-        
-        dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hit
-        
-        share_info = dict(
-            share_data=share_data,
-            max_bits=max_bits,
-            bits=bits,
-            timestamp=math.clip(desired_timestamp, (
-                (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
-                (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
-            )) if previous_share is not None else desired_timestamp,
-        )
-        
-        return share_info, dict(
-            version=1,
-            tx_ins=[dict(
-                previous_output=None,
-                sequence=None,
-                script=share_data['coinbase'].ljust(2, '\x00'),
-            )],
-            tx_outs=[dict(value=amounts[script], script=script) for script in dests if amounts[script]] + [dict(
-                value=0,
-                script='\x20' + pack.IntType(256).pack(bitcoin_data.hash256(cls.ref_type.pack(dict(
-                    identifier=net.IDENTIFIER,
-                    share_info=share_info,
-                )))),
-            )],
-            lock_time=0,
-        )
-    
-    __slots__ = 'net min_header share_info hash_link merkle_link other_txs hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash time_seen peer'.split(' ')
-    
-    def __init__(self, net, peer, min_header, share_info, hash_link, merkle_link, other_txs):
-        if len(share_info['share_data']['coinbase']) > 100:
-            raise ValueError('''coinbase too large! %i bytes''' % (len(self.share_data['coinbase']),))
-        
-        if len(merkle_link['branch']) > 16:
-            raise ValueError('merkle_branch too long!')
-        
-        if p2pool.DEBUG and other_txs is not None and bitcoin_data.calculate_merkle_link([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in other_txs], 0) != merkle_link:
-            raise ValueError('merkle_link and other_txs do not match')
-        
-        assert not hash_link['extra_data'], repr(hash_link['extra_data'])
-        
-        self.net = net
-        self.peer = peer
-        self.min_header = min_header
-        self.share_info = share_info
-        self.hash_link = hash_link
-        self.merkle_link = merkle_link
-        self.other_txs = other_txs
-        
-        self.share_data = self.share_info['share_data']
-        self.max_target = self.share_info['max_bits'].target
-        self.target = self.share_info['bits'].target
-        self.timestamp = self.share_info['timestamp']
-        self.previous_hash = self.share_data['previous_share_hash']
-        self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
-        self.desired_version = 0
-        
-        if self.timestamp > net.SWITCH_TIME:
-            from p2pool import p2p
-            raise p2p.PeerMisbehavingError('peer sent an old-style share with a timestamp after the switch time')
-        
-        self.gentx_hash = check_hash_link(
-            hash_link,
-            pack.IntType(256).pack(bitcoin_data.hash256(self.ref_type.pack(dict(
-                identifier=net.IDENTIFIER,
-                share_info=share_info,
-            )))) + pack.IntType(32).pack(0),
-            self.gentx_before_refhash,
-        )
-        merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, merkle_link)
-        self.header = dict(min_header, merkle_root=merkle_root)
-        self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
-        self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
-        
-        if self.pow_hash > self.target:
-            raise p2p.PeerMisbehavingError('share PoW invalid')
-        
-        if other_txs is not None and not self.pow_hash <= self.header['bits'].target:
-            raise ValueError('other_txs provided when not a block solution')
-        if other_txs is None and self.pow_hash <= self.header['bits'].target:
-            raise ValueError('other_txs not provided when a block solution')
-        
-        self.hash = bitcoin_data.hash256(share_type.pack(self.as_share()))
-        
-        # XXX eww
-        self.time_seen = time.time()
-    
-    def __repr__(self):
-        return '<Share %s>' % (' '.join('%s=%r' % (k, getattr(self, k)) for k in self.__slots__),)
-    
-    def check(self, tracker):
-        share_info, gentx = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.net)
-        if share_info != self.share_info:
-            raise ValueError('share difficulty invalid')
-        if bitcoin_data.hash256(bitcoin_data.tx_type.pack(gentx)) != self.gentx_hash:
-            raise ValueError('''gentx doesn't match hash_link''')
-    
-    def as_share(self):
-        if not self.pow_hash <= self.header['bits'].target: # share1a
-            return dict(type=2, contents=self.share1a_type.pack(dict(min_header=self.min_header, share_info=self.share_info, hash_link=self.hash_link, merkle_link=self.merkle_link)))
-        else: # share1b
-            return dict(type=3, contents=self.share1b_type.pack(dict(min_header=self.min_header, share_info=self.share_info, hash_link=self.hash_link, other_txs=self.other_txs)))
-    
-    def as_block(self, tracker):
-        if self.other_txs is None:
-            raise ValueError('share does not contain all txs')
-        
-        share_info, gentx = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.net)
-        assert share_info == self.share_info
-        
-        return dict(header=self.header, txs=[gentx] + self.other_txs)
-
-class NewShare(Share):
-    share_data_type = pack.ComposedType([
-        ('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
-        ('coinbase', pack.VarStrType()),
-        ('nonce', pack.IntType(32)),
-        ('pubkey_hash', pack.IntType(160)),
-        ('subsidy', pack.IntType(64)),
-        ('donation', pack.IntType(16)),
-        ('stale_info', pack.IntType(8)), # 0 nothing, 253 orphan, 254 doa
         ('desired_version', pack.VarIntType()),
     ])
     
@@ -277,7 +85,7 @@ class NewShare(Share):
     ])
     
     share_common_type = pack.ComposedType([
-        ('min_header', Share.small_block_header_type),
+        ('min_header', small_block_header_type),
         ('share_info', share_info_type),
         ('ref_merkle_link', pack.ComposedType([
             ('branch', pack.ListType(pack.IntType(256))),
@@ -302,6 +110,8 @@ class NewShare(Share):
         ('share_info', share_info_type),
     ])
     
+    gentx_before_refhash = pack.VarStrType().pack(DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack('\x20' + pack.IntType(256).pack(0))[:2]
+    
     @classmethod
     def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, net):
         previous_share = tracker.shares[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
@@ -321,7 +131,6 @@ class NewShare(Share):
         weights, total_weight, donation_weight = tracker.get_cumulative_weights(share_data['previous_share_hash'],
             min(height, net.REAL_CHAIN_LENGTH),
             65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
-            False,
         )
         assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
         
@@ -423,6 +232,9 @@ class NewShare(Share):
         # XXX eww
         self.time_seen = time.time()
     
+    def __repr__(self):
+        return '<Share %s>' % (' '.join('%s=%r' % (k, getattr(self, k)) for k in self.__slots__),)
+    
     def as_share(self):
         if not self.pow_hash <= self.header['bits'].target: # share1a
             return dict(type=4, contents=self.share1a_type.pack(dict(common=self.common, merkle_link=self.merkle_link)))
@@ -454,22 +266,19 @@ class WeightsSkipList(forest.TrackerSkipList):
     def combine_deltas(self, (share_count1, weights1, total_weight1, total_donation_weight1), (share_count2, weights2, total_weight2, total_donation_weight2)):
         return share_count1 + share_count2, math.add_dicts(weights1, weights2), total_weight1 + total_weight2, total_donation_weight1 + total_donation_weight2
     
-    def initial_solution(self, start, (max_shares, desired_weight, broken_mode)):
+    def initial_solution(self, start, (max_shares, desired_weight)):
         assert desired_weight % 65535 == 0, divmod(desired_weight, 65535)
         return 0, None, 0, 0
     
-    def apply_delta(self, (share_count1, weights_list, total_weight1, total_donation_weight1), (share_count2, weights2, total_weight2, total_donation_weight2), (max_shares, desired_weight, broken_mode)):
+    def apply_delta(self, (share_count1, weights_list, total_weight1, total_donation_weight1), (share_count2, weights2, total_weight2, total_donation_weight2), (max_shares, desired_weight)):
         if total_weight1 + total_weight2 > desired_weight and share_count2 == 1:
             assert (desired_weight - total_weight1) % 65535 == 0
             script, = weights2.iterkeys()
-            if broken_mode:
-                new_weights = dict(script=(desired_weight - total_weight1)//65535*weights2[script]//(total_weight2//65535))
-            else:
-                new_weights = {script: (desired_weight - total_weight1)//65535*weights2[script]//(total_weight2//65535)}
+            new_weights = {script: (desired_weight - total_weight1)//65535*weights2[script]//(total_weight2//65535)}
             return share_count1 + share_count2, (weights_list, new_weights), desired_weight, total_donation_weight1 + (desired_weight - total_weight1)//65535*total_donation_weight2//(total_weight2//65535)
         return share_count1 + share_count2, (weights_list, weights2), total_weight1 + total_weight2, total_donation_weight1 + total_donation_weight2
     
-    def judge(self, (share_count, weights_list, total_weight, total_donation_weight), (max_shares, desired_weight, broken_mode)):
+    def judge(self, (share_count, weights_list, total_weight, total_donation_weight), (max_shares, desired_weight)):
         if share_count > max_shares or total_weight > desired_weight:
             return 1
         elif share_count == max_shares or total_weight == desired_weight:
@@ -477,7 +286,7 @@ class WeightsSkipList(forest.TrackerSkipList):
         else:
             return -1
     
-    def finalize(self, (share_count, weights_list, total_weight, total_donation_weight), (max_shares, desired_weight, broken_mode)):
+    def finalize(self, (share_count, weights_list, total_weight, total_donation_weight), (max_shares, desired_weight)):
         assert share_count <= max_shares and total_weight <= desired_weight
         assert share_count == max_shares or total_weight == desired_weight
         return math.add_dicts(*math.flatten_linked_list(weights_list)), total_weight, total_donation_weight
@@ -688,7 +497,7 @@ def get_average_stale_prop(tracker, share_hash, lookbehind):
     return stales/(lookbehind + stales)
 
 def get_expected_payouts(tracker, best_share_hash, block_target, subsidy, net):
-    weights, total_weight, donation_weight = tracker.get_cumulative_weights(best_share_hash, min(tracker.get_height(best_share_hash), net.REAL_CHAIN_LENGTH), 65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target), False)
+    weights, total_weight, donation_weight = tracker.get_cumulative_weights(best_share_hash, min(tracker.get_height(best_share_hash), net.REAL_CHAIN_LENGTH), 65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target))
     res = dict((script, subsidy*weight//total_weight) for script, weight in weights.iteritems())
     res[DONATION_SCRIPT] = res.get(DONATION_SCRIPT, 0) + subsidy - sum(res.itervalues())
     return res
@@ -735,7 +544,7 @@ class ShareStore(object):
                             verified_hashes.add(verified_hash)
                         elif type_id == 5:
                             raw_share = share_type.unpack(data_hex.decode('hex'))
-                            if raw_share['type'] in [0, 1]:
+                            if raw_share['type'] in [0, 1, 2, 3]:
                                 continue
                             share = load_share(raw_share, self.net, None)
                             yield 'share', share
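
Alongside the class removal, the broken_mode flag is dropped from WeightsSkipList and from every get_cumulative_weights caller; the edits in main.py and web.py below are largely the removal of that trailing False argument. The post-commit call pattern, sketched from the diff (constant names are p2pool's):

# get_cumulative_weights after this commit: three parameters, no broken_mode flag
weights, total_weight, donation_weight = tracker.get_cumulative_weights(
    best_share_hash,                                                   # tip of the share chain to walk back from
    min(tracker.get_height(best_share_hash), net.REAL_CHAIN_LENGTH),   # how many shares to look behind
    65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),  # desired total weight
)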
diff --git a/p2pool/main.py b/p2pool/main.py
index 3180412..7b5721e 100644
@@ -500,16 +500,8 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                     mm_data = ''
                     mm_later = []
                 
-                def predict_timestamp():
-                    desired_timestamp = int(time.time() - current_work2.value['clock_offset'])
-                    previous_share = tracker.shares[current_work.value['best_share_hash']] if current_work.value['best_share_hash'] is not None else None
-                    return math.clip(desired_timestamp, (
-                        (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
-                        (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
-                    )) if previous_share is not None else desired_timestamp
-                new = predict_timestamp() >= net.SWITCH_TIME
-                if new:
-                    share_info, generate_tx = p2pool_data.NewShare.generate_transaction(
+                if True:
+                    share_info, generate_tx = p2pool_data.Share.generate_transaction(
                         tracker=tracker,
                         share_data=dict(
                             previous_share_hash=current_work.value['best_share_hash'],
@@ -531,27 +523,6 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                         ref_merkle_link=dict(branch=[], index=0),
                         net=net,
                     )
-                else:
-                    share_info, generate_tx = p2pool_data.Share.generate_transaction(
-                        tracker=tracker,
-                        share_data=dict(
-                            previous_share_hash=current_work.value['best_share_hash'],
-                            coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
-                            nonce=random.randrange(2**31),
-                            pubkey_hash=pubkey_hash,
-                            subsidy=current_work2.value['subsidy'],
-                            donation=math.perfect_round(65535*args.donation_percentage/100),
-                            stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
-                                253 if orphans > orphans_recorded_in_chain else
-                                254 if doas > doas_recorded_in_chain else
-                                0
-                            )(*get_stale_counts()),
-                        ),
-                        block_target=current_work.value['bits'].target,
-                        desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
-                        desired_target=desired_share_target,
-                        net=net,
-                    )
                 
                 target = net.PARENT.SANE_MAX_TARGET
                 if desired_pseudoshare_target is None:
@@ -637,13 +608,10 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                     if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
                         min_header = dict(header);del min_header['merkle_root']
                         hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.Share.gentx_before_refhash)
-                        if new:
-                            share = p2pool_data.NewShare(net, None, dict(
-                                min_header=min_header, share_info=share_info, hash_link=hash_link,
-                                ref_merkle_link=dict(branch=[], index=0),
-                            ), merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
-                        else:
-                            share = p2pool_data.Share(net, None, min_header, share_info, hash_link=hash_link, merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
+                        share = p2pool_data.Share(net, None, dict(
+                            min_header=min_header, share_info=share_info, hash_link=hash_link,
+                            ref_merkle_link=dict(branch=[], index=0),
+                        ), merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                         
                         print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                             request.getUser(),
diff --git a/p2pool/web.py b/p2pool/web.py
index 8bc6e85..7b4bad2 100644
@@ -50,7 +50,7 @@ def get_web_root(tracker, current_work, current_work2, get_current_txouts, datad
     
     def get_users():
         height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
-        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256, False)
+        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
         res = {}
         for script in sorted(weights, key=lambda s: weights[s]):
             res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight