added new share type with cutoff date and incremented protocol version (0.10.0)
author Forrest Voight <forrest@forre.st>
Tue, 20 Mar 2012 18:55:12 +0000 (14:55 -0400)
committer Forrest Voight <forrest@forre.st>
Tue, 20 Mar 2012 20:32:19 +0000 (16:32 -0400)
Changes:
    * shares contain a "desired_version" field that votes to warn other users about upgrades
    * shares contain a "far_share_hash" field that references its 100th parent
        * could be used in the future to implement a query/response check that verifies that all shares in a chain
          are present and that a competing chain actually has a lot of hash power behind it, preventing attacks
          where p2pool is flooded with shares that it is forced to remember for minutes
    * shares contain a merkle branch to the reference hash at the end of the generation transaction, which will let other merged-mining (MM) chains use p2pool's MM method
    * the "script" txout script bug is fixed

The switch time matches BIP16's activation date (April 1st) for the Bitcoin chain and is March 27th for the Litecoin chain; both testnets switch a week before April 1st.
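
For reference, a quick decode of the SWITCH_TIME constants added in networks.py (the Unix timestamps are copied from the diff; the datetime conversion is only illustrative). Old-style shares timestamped after the switch time, and new-style shares timestamped before it, are rejected as peer misbehavior, as shown in the data.py diff below.

    # Decode the SWITCH_TIME constants from networks.py; values copied from
    # the diff, conversion shown here only for illustration.
    from datetime import datetime

    SWITCH_TIMES = {
        'bitcoin': 1333238400,                      # BIP16 activation
        'bitcoin_testnet': 1333238400 - 60*60*24*7,
        'litecoin': 1333238400 - 60*60*24*5,
        'litecoin_testnet': 1333238400 - 60*60*24*7,
    }

    for name, ts in sorted(SWITCH_TIMES.items()):
        print('%-17s %s UTC' % (name, datetime.utcfromtimestamp(ts)))
    # bitcoin           2012-04-01 00:00:00 UTC
    # bitcoin_testnet   2012-03-25 00:00:00 UTC
    # litecoin          2012-03-27 00:00:00 UTC
    # litecoin_testnet  2012-03-25 00:00:00 UTC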

p2pool/data.py
p2pool/main.py
p2pool/networks.py
p2pool/p2p.py

p2pool/data.py
index a106ff0..2655f77 100644
@@ -51,6 +51,11 @@ def load_share(share, net, peer):
     elif share['type'] == 3:
         share1b = Share.share1b_type.unpack(share['contents'])
         return Share(net, peer, merkle_link=bitcoin_data.calculate_merkle_link([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in share1b['other_txs']], 0), **share1b)
+    elif share['type'] == 4:
+        return NewShare(net, peer, other_txs=None, **NewShare.share1a_type.unpack(share['contents']))
+    elif share['type'] == 5:
+        share1b = NewShare.share1b_type.unpack(share['contents'])
+        return NewShare(net, peer, merkle_link=bitcoin_data.calculate_merkle_link([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in share1b['other_txs']], 0), **share1b)
     else:
         raise ValueError('unknown share type: %r' % (share['type'],))
 
@@ -196,6 +201,10 @@ class Share(object):
         self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
         self.desired_version = 0
         
+        if self.timestamp > net.SWITCH_TIME:
+            from p2pool import p2p
+            raise p2p.PeerMisbehavingError('peer sent an old-style share with a timestamp after the switch time')
+        
         self.gentx_hash = check_hash_link(
             hash_link,
             pack.IntType(256).pack(bitcoin_data.hash256(self.ref_type.pack(dict(
@@ -247,6 +256,192 @@ class Share(object):
         
         return dict(header=self.header, txs=[gentx] + self.other_txs)
 
+class NewShare(Share):
+    share_data_type = pack.ComposedType([
+        ('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
+        ('coinbase', pack.VarStrType()),
+        ('nonce', pack.IntType(32)),
+        ('pubkey_hash', pack.IntType(160)),
+        ('subsidy', pack.IntType(64)),
+        ('donation', pack.IntType(16)),
+        ('stale_info', pack.IntType(8)), # 0 nothing, 253 orphan, 254 doa
+        ('desired_version', pack.VarIntType()),
+    ])
+    
+    share_info_type = pack.ComposedType([
+        ('share_data', share_data_type),
+        ('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
+        ('max_bits', bitcoin_data.FloatingIntegerType()),
+        ('bits', bitcoin_data.FloatingIntegerType()),
+        ('timestamp', pack.IntType(32)),
+    ])
+    
+    share_common_type = pack.ComposedType([
+        ('min_header', Share.small_block_header_type),
+        ('share_info', share_info_type),
+        ('ref_merkle_link', pack.ComposedType([
+            ('branch', pack.ListType(pack.IntType(256))),
+            ('index', pack.VarIntType()),
+        ])),
+        ('hash_link', hash_link_type),
+    ])
+    share1a_type = pack.ComposedType([
+        ('common', share_common_type),
+        ('merkle_link', pack.ComposedType([
+            ('branch', pack.ListType(pack.IntType(256))),
+            ('index', pack.IntType(0)), # it will always be 0
+        ])),
+    ])
+    share1b_type = pack.ComposedType([
+        ('common', share_common_type),
+        ('other_txs', pack.ListType(bitcoin_data.tx_type)),
+    ])
+    
+    ref_type = pack.ComposedType([
+        ('identifier', pack.FixedStrType(64//8)),
+        ('share_info', share_info_type),
+    ])
+    
+    @classmethod
+    def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, net):
+        previous_share = tracker.shares[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
+        
+        height, last = tracker.get_height_and_last(share_data['previous_share_hash'])
+        assert height >= net.REAL_CHAIN_LENGTH or last is None
+        if height < net.TARGET_LOOKBEHIND:
+            pre_target3 = net.MAX_TARGET
+        else:
+            attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True)
+            pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 if attempts_per_second else 2**256-1
+            pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10))
+            pre_target3 = math.clip(pre_target2, (0, net.MAX_TARGET))
+        max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
+        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//10, pre_target3)))
+        
+        weights, total_weight, donation_weight = tracker.get_cumulative_weights(share_data['previous_share_hash'],
+            min(height, net.REAL_CHAIN_LENGTH),
+            65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
+            False,
+        )
+        assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
+        
+        amounts = dict((script, share_data['subsidy']*(199*weight)//(200*total_weight)) for script, weight in weights.iteritems()) # 99.5% goes according to weights prior to this share
+        this_script = bitcoin_data.pubkey_hash_to_script2(share_data['pubkey_hash'])
+        amounts[this_script] = amounts.get(this_script, 0) + share_data['subsidy']//200 # 0.5% goes to block finder
+        amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + share_data['subsidy'] - sum(amounts.itervalues()) # all that's left over is the donation weight and some extra satoshis due to rounding
+        
+        if sum(amounts.itervalues()) != share_data['subsidy'] or any(x < 0 for x in amounts.itervalues()):
+            raise ValueError()
+        
+        dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hit
+        
+        share_info = dict(
+            share_data=share_data,
+            far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
+            max_bits=max_bits,
+            bits=bits,
+            timestamp=math.clip(desired_timestamp, (
+                (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
+                (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
+            )) if previous_share is not None else desired_timestamp,
+        )
+        
+        return share_info, dict(
+            version=1,
+            tx_ins=[dict(
+                previous_output=None,
+                sequence=None,
+                script=share_data['coinbase'].ljust(2, '\x00'),
+            )],
+            tx_outs=[dict(value=amounts[script], script=script) for script in dests if amounts[script]] + [dict(
+                value=0,
+                script='\x20' + cls.get_ref_hash(net, share_info, ref_merkle_link),
+            )],
+            lock_time=0,
+        )
+    
+    @classmethod
+    def get_ref_hash(cls, net, share_info, ref_merkle_link):
+        return pack.IntType(256).pack(bitcoin_data.check_merkle_link(bitcoin_data.hash256(cls.ref_type.pack(dict(
+            identifier=net.IDENTIFIER,
+            share_info=share_info,
+        ))), ref_merkle_link))
+    
+    __slots__ = 'net peer common min_header share_info hash_link merkle_link other_txs hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash time_seen'.split(' ')
+    
+    def __init__(self, net, peer, common, merkle_link, other_txs):
+        self.net = net
+        self.peer = peer
+        self.common = common
+        self.min_header = common['min_header']
+        self.share_info = common['share_info']
+        self.hash_link = common['hash_link']
+        self.merkle_link = merkle_link
+        self.other_txs = other_txs
+        
+        if len(self.share_info['share_data']['coinbase']) > 100:
+            raise ValueError('''coinbase too large! %i bytes''' % (len(self.share_info['share_data']['coinbase']),))
+        
+        if len(merkle_link['branch']) > 16:
+            raise ValueError('merkle branch too long!')
+        
+        if p2pool.DEBUG and other_txs is not None and bitcoin_data.calculate_merkle_link([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in other_txs], 0) != merkle_link:
+            raise ValueError('merkle_link and other_txs do not match')
+        
+        assert not self.hash_link['extra_data'], repr(self.hash_link['extra_data'])
+        
+        self.share_data = self.share_info['share_data']
+        self.max_target = self.share_info['max_bits'].target
+        self.target = self.share_info['bits'].target
+        self.timestamp = self.share_info['timestamp']
+        self.previous_hash = self.share_data['previous_share_hash']
+        self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
+        self.desired_version = self.share_data['desired_version']
+        
+        if self.timestamp < net.SWITCH_TIME:
+            from p2pool import p2p
+            raise p2p.PeerMisbehavingError('peer sent a new-style share with a timestamp before the switch time')
+        
+        self.gentx_hash = check_hash_link(
+            self.hash_link,
+            self.get_ref_hash(net, self.share_info, common['ref_merkle_link']) + pack.IntType(32).pack(0),
+            self.gentx_before_refhash,
+        )
+        merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, merkle_link)
+        self.header = dict(self.min_header, merkle_root=merkle_root)
+        self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
+        self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
+        
+        if self.pow_hash > self.target:
+            from p2pool import p2p
+            raise p2p.PeerMisbehavingError('share PoW invalid')
+        
+        if other_txs is not None and not self.pow_hash <= self.header['bits'].target:
+            raise ValueError('other_txs provided when not a block solution')
+        if other_txs is None and self.pow_hash <= self.header['bits'].target:
+            raise ValueError('other_txs not provided when a block solution')
+        
+        # XXX eww
+        self.time_seen = time.time()
+    
+    def as_share(self):
+        if not self.pow_hash <= self.header['bits'].target: # share1a
+            return dict(type=4, contents=self.share1a_type.pack(dict(common=self.common, merkle_link=self.merkle_link)))
+        else: # share1b
+            return dict(type=5, contents=self.share1b_type.pack(dict(common=self.common, other_txs=self.other_txs)))
+    
+    def check(self, tracker):
+        share_info, gentx = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.common['ref_merkle_link'], self.net)
+        if share_info != self.share_info:
+            raise ValueError('share_info invalid')
+        if bitcoin_data.hash256(bitcoin_data.tx_type.pack(gentx)) != self.gentx_hash:
+            raise ValueError('''gentx doesn't match hash_link''')
+        return gentx # only used by as_block
+    
+    def as_block(self, tracker):
+        if self.other_txs is None:
+            raise ValueError('share does not contain all txs')
+        return dict(header=self.header, txs=[self.check(tracker)] + self.other_txs)
+
 class WeightsSkipList(forest.TrackerSkipList):
     # share_count, weights, total_weight
     
p2pool/main.py
index 838912f..d6843bd 100644
@@ -501,26 +501,58 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                     mm_data = ''
                     mm_later = []
                 
-                share_info, generate_tx = p2pool_data.Share.generate_transaction(
-                    tracker=tracker,
-                    share_data=dict(
-                        previous_share_hash=current_work.value['best_share_hash'],
-                        coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
-                        nonce=random.randrange(2**32),
-                        pubkey_hash=pubkey_hash,
-                        subsidy=current_work2.value['subsidy'],
-                        donation=math.perfect_round(65535*args.donation_percentage/100),
-                        stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
-                            253 if orphans > orphans_recorded_in_chain else
-                            254 if doas > doas_recorded_in_chain else
-                            0
-                        )(*get_stale_counts()),
-                    ),
-                    block_target=current_work.value['bits'].target,
-                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
-                    desired_target=desired_share_target,
-                    net=net,
-                )
+                def predict_timestamp():
+                    desired_timestamp = int(time.time() - current_work2.value['clock_offset'])
+                    previous_share = tracker.shares[current_work.value['best_share_hash']] if current_work.value['best_share_hash'] is not None else None
+                    return math.clip(desired_timestamp, (
+                        (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
+                        (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
+                    )) if previous_share is not None else desired_timestamp
+                new = predict_timestamp() >= net.SWITCH_TIME
+                if new:
+                    share_info, generate_tx = p2pool_data.NewShare.generate_transaction(
+                        tracker=tracker,
+                        share_data=dict(
+                            previous_share_hash=current_work.value['best_share_hash'],
+                            coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
+                            nonce=random.randrange(2**32),
+                            pubkey_hash=pubkey_hash,
+                            subsidy=current_work2.value['subsidy'],
+                            donation=math.perfect_round(65535*args.donation_percentage/100),
+                            stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
+                                253 if orphans > orphans_recorded_in_chain else
+                                254 if doas > doas_recorded_in_chain else
+                                0
+                            )(*get_stale_counts()),
+                            desired_version=1,
+                        ),
+                        block_target=current_work.value['bits'].target,
+                        desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
+                        desired_target=desired_share_target,
+                        ref_merkle_link=dict(branch=[], index=0),
+                        net=net,
+                    )
+                else:
+                    share_info, generate_tx = p2pool_data.Share.generate_transaction(
+                        tracker=tracker,
+                        share_data=dict(
+                            previous_share_hash=current_work.value['best_share_hash'],
+                            coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
+                            nonce=random.randrange(2**32),
+                            pubkey_hash=pubkey_hash,
+                            subsidy=current_work2.value['subsidy'],
+                            donation=math.perfect_round(65535*args.donation_percentage/100),
+                            stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
+                                253 if orphans > orphans_recorded_in_chain else
+                                254 if doas > doas_recorded_in_chain else
+                                0
+                            )(*get_stale_counts()),
+                        ),
+                        block_target=current_work.value['bits'].target,
+                        desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
+                        desired_target=desired_share_target,
+                        net=net,
+                    )
                 
                 target = net.PARENT.SANE_MAX_TARGET
                 if desired_pseudoshare_target is None:
@@ -606,7 +638,13 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                     if pow_hash <= share_info['bits'].target:
                         min_header = dict(header);del min_header['merkle_root']
                         hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.Share.gentx_before_refhash)
-                        share = p2pool_data.Share(net, None, min_header, share_info, hash_link=hash_link, merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
+                        if new:
+                            share = p2pool_data.NewShare(net, None, dict(
+                                min_header=min_header, share_info=share_info, hash_link=hash_link,
+                                ref_merkle_link=dict(branch=[], index=0),
+                            ), merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
+                        else:
+                            share = p2pool_data.Share(net, None, min_header, share_info, hash_link=hash_link, merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                         
                         print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                             request.getUser(),
@@ -772,7 +810,7 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                         
                         desired_version_counts = p2pool_data.get_desired_version_counts(tracker, current_work.value['best_share_hash'], min(720, height))
                         majority_desired_version = max(desired_version_counts, key=lambda k: desired_version_counts[k])
-                        if majority_desired_version not in [0]:
+                        if majority_desired_version not in [0, 1]:
                             print >>sys.stderr, '#'*40
                             print >>sys.stderr, '>>> WARNING: A MAJORITY OF SHARES CONTAIN A VOTE FOR AN UNSUPPORTED SHARE IMPLEMENTATION! (v%i with %i%% support)' % (
                                 majority_desired_version, 100*desired_version_counts[majority_desired_version]/sum(desired_version_counts.itervalues()))
p2pool/networks.py
index 06b5458..12d5367 100644
@@ -24,6 +24,7 @@ nets = dict(
         BOOTSTRAP_ADDRS='74.220.242.6:9334 93.97.192.93 66.90.73.83 67.83.108.0 219.84.64.174 24.167.17.248 109.74.195.142 83.211.86.49 89.78.212.44 94.23.34.145 168.7.116.243 72.14.191.28 94.174.40.189:9344'.split(' '),
         ANNOUNCE_CHANNEL='#p2pool',
         VERSION_CHECK=lambda (major, minor, patch), temp_work: major >= 7 or (major == 6 and patch >= 3) or (major == 5 and minor >= 4) or '/P2SH/' in temp_work['coinbaseflags'],
+        SWITCH_TIME=1333238400,
     ),
     bitcoin_testnet=math.Object(
         PARENT=networks.nets['bitcoin_testnet'],
@@ -41,6 +42,7 @@ nets = dict(
         BOOTSTRAP_ADDRS='72.14.191.28'.split(' '),
         ANNOUNCE_CHANNEL='#p2pool-alt',
         VERSION_CHECK=lambda (major, minor, patch), temp_work: major >= 7 or (major == 6 and patch >= 3) or (major == 5 and minor >= 4) or '/P2SH/' in temp_work['coinbaseflags'],
+        SWITCH_TIME=1333238400-60*60*24*7,
     ),
     
     litecoin=math.Object(
@@ -59,6 +61,7 @@ nets = dict(
         BOOTSTRAP_ADDRS='76.26.53.101 124.205.120.178 190.195.79.161 173.167.113.73 82.161.65.210 67.83.108.0 78.101.67.239 78.100.161.252 87.58.117.233 78.100.162.223 216.239.45.4 78.101.131.221 72.14.191.28 97.81.163.217 69.126.183.240 219.84.64.174 78.101.119.27 89.211.228.244 178.152.122.30 172.16.0.3 76.26.53.101:51319'.split(' '),
         ANNOUNCE_CHANNEL='#p2pool-alt',
         VERSION_CHECK=lambda (major, minor, patch), temp_work: True,
+        SWITCH_TIME=1333238400-60*60*24*5,
     ),
     litecoin_testnet=math.Object(
         PARENT=networks.nets['litecoin_testnet'],
@@ -76,6 +79,7 @@ nets = dict(
         BOOTSTRAP_ADDRS='72.14.191.28'.split(' '),
         ANNOUNCE_CHANNEL='#p2pool-alt',
         VERSION_CHECK=lambda (major, minor, patch), temp_work: True,
+        SWITCH_TIME=1333238400-60*60*24*7,
     ),
 )
 for net_name, net in nets.iteritems():
p2pool/p2p.py
index 31a6d8c..d6b27d5 100644
@@ -29,7 +29,7 @@ class Protocol(p2protocol.Protocol):
         self.addr = self.transport.getPeer().host, self.transport.getPeer().port
         
         self.send_version(
-            version=3,
+            version=4,
             services=0,
             addr_to=dict(
                 services=0,