cleaned up p2pool.data
diff --git a/p2pool/data.py b/p2pool/data.py
index 6c695f6..18454c9 100644
--- a/p2pool/data.py
+++ b/p2pool/data.py
 from __future__ import division
 
+import hashlib
+import os
 import random
 import time
-import os
 
 from twisted.python import log
 
 import p2pool
 from p2pool import skiplists
-from p2pool.bitcoin import data as bitcoin_data, script
-from p2pool.util import math, forest
+from p2pool.bitcoin import data as bitcoin_data, script, sha256
+from p2pool.util import math, forest, pack
+
+# hashlink
+
+hash_link_type = pack.ComposedType([
+    ('state', pack.FixedStrType(32)),
+    ('extra_data', pack.FixedStrType(0)), # bit of a hack, but since the donation script is at the end, const_ending is long enough to always make this empty
+    ('length', pack.VarIntType()),
+])
 
+def prefix_to_hash_link(prefix, const_ending=''):
+    assert prefix.endswith(const_ending), (prefix, const_ending)
+    x = sha256.sha256(prefix)
+    return dict(state=x.state, extra_data=x.buf[:max(0, len(x.buf)-len(const_ending))], length=x.length//8)
+
+def check_hash_link(hash_link, data, const_ending=''):
+    extra_length = hash_link['length'] % (512//8)
+    assert len(hash_link['extra_data']) == max(0, extra_length - len(const_ending))
+    extra = (hash_link['extra_data'] + const_ending)[len(hash_link['extra_data']) + len(const_ending) - extra_length:]
+    assert len(extra) == extra_length
+    return pack.IntType(256).unpack(hashlib.sha256(sha256.sha256(data, (hash_link['state'], extra, 8*hash_link['length'])).digest()).digest())
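# Sanity-check sketch for the hash link, assuming the p2pool package is importable and
# that bitcoin_data.hash256 is the usual double-SHA256 helper: resuming from the stored
# midstate over the remaining data must equal hashing the whole string at once.
prefix = 100*'\x00' + 'const ending'
link = prefix_to_hash_link(prefix, 'const ending')
assert check_hash_link(link, 'more data', 'const ending') == bitcoin_data.hash256(prefix + 'more data')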
+
+# shares
+
+small_block_header_type = pack.ComposedType([
+    ('version', pack.VarIntType()), # XXX must be constrained to 32 bits
+    ('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))),
+    ('timestamp', pack.IntType(32)),
+    ('bits', bitcoin_data.FloatingIntegerType()),
+    ('nonce', pack.IntType(32)),
+])
 
-share_data_type = bitcoin_data.ComposedType([
-    ('previous_share_hash', bitcoin_data.PossiblyNoneType(0, bitcoin_data.HashType())),
-    ('coinbase', bitcoin_data.VarStrType()),
-    ('nonce', bitcoin_data.VarStrType()),
-    ('new_script', bitcoin_data.VarStrType()),
-    ('subsidy', bitcoin_data.StructType('<Q')),
-    ('donation', bitcoin_data.StructType('<H')),
-    ('stale_info', bitcoin_data.StructType('<B')), # 0 nothing, 253 orphan, 254 doa. previously: 254*perfect_round(my_stale_prop), None if no shares
+share_data_type = pack.ComposedType([
+    ('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
+    ('coinbase', pack.VarStrType()),
+    ('nonce', pack.IntType(32)),
+    ('pubkey_hash', pack.IntType(160)),
+    ('subsidy', pack.IntType(64)),
+    ('donation', pack.IntType(16)),
+    ('stale_info', pack.IntType(8)), # 0 nothing, 253 orphan, 254 doa
 ])
 
-share_info_type = bitcoin_data.ComposedType([
+share_info_type = pack.ComposedType([
     ('share_data', share_data_type),
+    ('max_bits', bitcoin_data.FloatingIntegerType()),
     ('bits', bitcoin_data.FloatingIntegerType()),
-    ('timestamp', bitcoin_data.StructType('<I')),
+    ('timestamp', pack.IntType(32)),
 ])
 
-share1a_type = bitcoin_data.ComposedType([
-    ('header', bitcoin_data.block_header_type),
+share1a_type = pack.ComposedType([
+    ('min_header', small_block_header_type),
     ('share_info', share_info_type),
+    ('hash_link', hash_link_type),
     ('merkle_branch', bitcoin_data.merkle_branch_type),
 ])
 
-share1b_type = bitcoin_data.ComposedType([
-    ('header', bitcoin_data.block_header_type),
+share1b_type = pack.ComposedType([
+    ('min_header', small_block_header_type),
     ('share_info', share_info_type),
-    ('other_txs', bitcoin_data.ListType(bitcoin_data.tx_type)),
+    ('hash_link', hash_link_type),
+    ('other_txs', pack.ListType(bitcoin_data.tx_type)),
 ])
 
+
 # type:
-# 0: share1a
-# 1: share1b
+# 2: share1a
+# 3: share1b
+
+share_type = pack.ComposedType([
+    ('type', pack.VarIntType()),
+    ('contents', pack.VarStrType()),
+])
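# Usage sketch: the wrapper keeps the payload opaque (a VarStr), so peers and the
# ShareStore below can recognize and skip share versions they cannot parse. The
# 'contents' value here is a stand-in string, not a real packed share1a.
wrapped = share_type.pack(dict(type=2, contents='...share1a_type-packed bytes...'))
raw = share_type.unpack(wrapped)
if raw['type'] not in [2, 3]:
    pass # obsolete or unknown share version: skip it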
+
+
+def get_pool_attempts_per_second(tracker, previous_share_hash, dist, min_work=False):
+    assert dist >= 2
+    near = tracker.shares[previous_share_hash]
+    far = tracker.shares[tracker.get_nth_parent_hash(previous_share_hash, dist - 1)]
+    attempts = tracker.get_work(near.hash) - tracker.get_work(far.hash) if not min_work else tracker.get_delta(near.hash).min_work - tracker.get_delta(far.hash).min_work
+    time = near.timestamp - far.timestamp
+    if time <= 0:
+        time = 1
+    return attempts//time
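# Rough illustration of what this estimates (hypothetical numbers, ignoring the tracker
# plumbing): the expected number of hashes between two shares dist-1 apart, divided by
# the wall-clock time between them.
share_target = 2**256//100000 - 1            # each share takes ~100000 hashes on average
work_per_share = 2**256//(share_target + 1)  # what target_to_average_attempts computes
attempts = 16*work_per_share                 # dist=17 spans 16 shares' worth of work
elapsed = 160                                # seconds between the two endpoint shares
print attempts//elapsed                      # ~10000 attempts (hashes) per second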
 
-share_type = bitcoin_data.ComposedType([
-    ('type', bitcoin_data.VarIntType()),
-    ('contents', bitcoin_data.VarStrType()),
+def get_average_stale_prop(tracker, share_hash, lookbehind):
+    stales = sum(1 for share in tracker.get_chain(share_hash, lookbehind) if share.share_data['stale_info'] in [253, 254])
+    return stales/(lookbehind + stales)
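# Worked example (hypothetical counts): the chain only contains shares that were accepted,
# so 10 stale reports over a 100-share lookbehind implies roughly 110 shares were produced
# in total, giving a stale proportion of 10/110, about 9.1%.
stales, lookbehind = 10, 100
print stales/(lookbehind + stales) # ~0.0909 with the module's "from __future__ import division"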
+
+DONATION_SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
+
+ref_type = pack.ComposedType([
+    ('identifier', pack.FixedStrType(64//8)),
+    ('share_info', share_info_type),
 ])
 
+gentx_before_refhash = pack.VarStrType().pack(DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack('\x20' + pack.IntType(256).pack(0))[:2]
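# (The constant above is the generation transaction's serialization between the donation
# output's value and the 32-byte reference hash: the donation script with its VarStr length
# prefix, the final output's 8-byte zero value, and the 0x21/0x20 length-and-push bytes.
# It is the const_ending that lets the hash link's extra_data stay empty.)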
+
+def generate_transaction(tracker, share_data, block_target, desired_timestamp, desired_target, net):
+    previous_share = tracker.shares[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
+    
+    height, last = tracker.get_height_and_last(share_data['previous_share_hash'])
+    assert height >= net.REAL_CHAIN_LENGTH or last is None
+    if height < net.TARGET_LOOKBEHIND:
+        pre_target3 = net.MAX_TARGET
+    else:
+        attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True)
+        pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1
+        pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10))
+        pre_target3 = math.clip(pre_target2, (0, net.MAX_TARGET))
+    max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
+    bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//10, pre_target3)))
+    
+    weights, total_weight, donation_weight = tracker.get_cumulative_weights(share_data['previous_share_hash'],
+        min(height, net.REAL_CHAIN_LENGTH),
+        65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
+    )
+    assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
+    
+    amounts = dict((script, share_data['subsidy']*(199*weight)//(200*total_weight)) for script, weight in weights.iteritems()) # 99.5% goes according to weights prior to this share
+    this_script = bitcoin_data.pubkey_hash_to_script2(share_data['pubkey_hash'])
+    amounts[this_script] = amounts.get(this_script, 0) + share_data['subsidy']//200 # 0.5% goes to block finder
+    amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + share_data['subsidy'] - sum(amounts.itervalues()) # all that's left over is the donation weight and some extra satoshis due to rounding
+    
+    if sum(amounts.itervalues()) != share_data['subsidy'] or any(x < 0 for x in amounts.itervalues()):
+        raise ValueError()
+    
+    dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hit
+    
+    share_info = dict(
+        share_data=share_data,
+        max_bits=max_bits,
+        bits=bits,
+        timestamp=math.clip(desired_timestamp, (
+            (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
+            (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
+        )) if previous_share is not None else desired_timestamp,
+    )
+    
+    return share_info, dict(
+        version=1,
+        tx_ins=[dict(
+            previous_output=None,
+            sequence=None,
+            script=share_data['coinbase'].ljust(2, '\x00'),
+        )],
+        tx_outs=[dict(value=amounts[script], script=script) for script in dests if amounts[script]] + [dict(
+            value=0,
+            script='\x20' + pack.IntType(256).pack(bitcoin_data.hash256(ref_type.pack(dict(
+                identifier=net.IDENTIFIER,
+                share_info=share_info,
+            )))),
+        )],
+        lock_time=0,
+    )
+
+def get_expected_payouts(tracker, best_share_hash, block_target, subsidy, net):
+    weights, total_weight, donation_weight = tracker.get_cumulative_weights(best_share_hash, min(tracker.get_height(best_share_hash), net.REAL_CHAIN_LENGTH), 65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target))
+    res = dict((script, subsidy*weight//total_weight) for script, weight in weights.iteritems())
+    res[DONATION_SCRIPT] = res.get(DONATION_SCRIPT, 0) + subsidy - sum(res.itervalues())
+    return res
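# Payout-split sketch (hypothetical weights and scripts, not real skiplist output): 99.5%
# of the subsidy is split by weight, 0.5% goes to this share's miner, and the donation
# weight plus rounding dust go to DONATION_SCRIPT, so outputs always sum to the subsidy.
subsidy = 5000000000                       # 50 BTC in satoshis
weights = {'scriptA': 60, 'scriptB': 39}   # hypothetical payout scripts and weights
donation_weight = 1
total_weight = sum(weights.itervalues()) + donation_weight
amounts = dict((script, subsidy*(199*weight)//(200*total_weight)) for script, weight in weights.iteritems())
amounts['scriptA'] = amounts.get('scriptA', 0) + subsidy//200   # pretend scriptA found this share
amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + subsidy - sum(amounts.itervalues())
assert sum(amounts.itervalues()) == subsidy and min(amounts.itervalues()) >= 0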
+
 class Share(object):
-    __slots__ = 'header previous_block share_info merkle_branch other_txs timestamp share_data new_script subsidy previous_hash previous_share_hash target nonce pow_hash header_hash hash time_seen peer donation net'.split(' ')
+    __slots__ = 'net min_header share_info hash_link merkle_branch other_txs hash share_data max_target target timestamp previous_hash new_script gentx_hash header pow_hash header_hash time_seen peer'.split(' ')
     
     @classmethod
-    def from_share(cls, share, net):
-        if share['type'] == 0:
-            res = cls.from_share1a(share1a_type.unpack(share['contents']), net)
-            if not (res.pow_hash > res.header['bits'].target):
-                raise ValueError('invalid share type')
-            return res
-        elif share['type'] == 1:
-            res = cls.from_share1b(share1b_type.unpack(share['contents']), net)
-            if not (res.pow_hash <= res.header['bits'].target):
-                raise ValueError('invalid share type')
-            return res
+    def from_share(cls, share, net, peer):
+        if share['type'] == 2:
+            return cls(net, peer, other_txs=None, **share1a_type.unpack(share['contents']))
+        elif share['type'] == 3:
+            share1b = share1b_type.unpack(share['contents'])
+            return cls(net, peer, merkle_branch=bitcoin_data.calculate_merkle_branch([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in share1b['other_txs']], 0), **share1b)
         else:
             raise ValueError('unknown share type: %r' % (share['type'],))
     
-    @classmethod
-    def from_share1a(cls, share1a, net):
-        return cls(net, **share1a)
-    
-    @classmethod
-    def from_share1b(cls, share1b, net):
-        return cls(net, **share1b)
-    
-    def __init__(self, net, header, share_info, merkle_branch=None, other_txs=None):
-        self.net = net
-        
-        if merkle_branch is None and other_txs is None:
-            raise ValueError('need either merkle_branch or other_txs')
-        if other_txs is not None:
-            new_merkle_branch = bitcoin_data.calculate_merkle_branch([0] + map(bitcoin_data.tx_type.hash256, other_txs), 0)
-            if merkle_branch is not None:
-                if merke_branch != new_merkle_branch:
-                    raise ValueError('invalid merkle_branch and other_txs')
-            merkle_branch = new_merkle_branch
+    def __init__(self, net, peer, min_header, share_info, hash_link, merkle_branch, other_txs):
+        if len(share_info['share_data']['coinbase']) > 100:
+            raise ValueError('''coinbase too large! %i bytes''' % (len(share_info['share_data']['coinbase']),))
         
         if len(merkle_branch) > 16:
             raise ValueError('merkle_branch too long!')
         
-        self.header = header
-        self.previous_block = header['previous_block']
+        if p2pool.DEBUG and other_txs is not None and bitcoin_data.calculate_merkle_branch([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in other_txs], 0) != merkle_branch:
+            raise ValueError('merkle_branch and other_txs do not match')
+        
+        assert not hash_link['extra_data'], repr(hash_link['extra_data'])
+        
+        self.net = net
+        self.peer = peer
+        self.min_header = min_header
         self.share_info = share_info
+        self.hash_link = hash_link
         self.merkle_branch = merkle_branch
+        self.other_txs = other_txs
         
         self.share_data = self.share_info['share_data']
+        self.max_target = self.share_info['max_bits'].target
         self.target = self.share_info['bits'].target
         self.timestamp = self.share_info['timestamp']
-        
-        self.new_script = self.share_data['new_script']
-        self.subsidy = self.share_data['subsidy']
-        self.donation = self.share_data['donation']
-        
-        if len(self.new_script) > 100:
-            raise ValueError('new_script too long!')
-        
-        self.previous_hash = self.previous_share_hash = self.share_data['previous_share_hash']
-        self.nonce = self.share_data['nonce']
-        
-        if len(self.nonce) > 100:
-            raise ValueError('nonce too long!')
-        
-        if len(self.share_data['coinbase']) > 100:
-            raise ValueError('''coinbase too large! %i bytes''' % (len(self.share_data['coinbase']),))
-        
-        self.pow_hash = net.PARENT.POW_FUNC(header)
-        self.header_hash = bitcoin_data.block_header_type.hash256(header)
-        
-        self.hash = share1a_type.hash256(self.as_share1a())
+        self.previous_hash = self.share_data['previous_share_hash']
+        self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
+        
+        self.gentx_hash = check_hash_link(
+            hash_link,
+            pack.IntType(256).pack(bitcoin_data.hash256(ref_type.pack(dict(
+                identifier=net.IDENTIFIER,
+                share_info=share_info,
+            )))) + pack.IntType(32).pack(0),
+            gentx_before_refhash,
+        )
+        merkle_root = bitcoin_data.check_merkle_branch(self.gentx_hash, 0, merkle_branch)
+        self.header = dict(min_header, merkle_root=merkle_root)
+        self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
+        self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
         
         if self.pow_hash > self.target:
             print 'hash %x' % self.pow_hash
             print 'targ %x' % self.target
             raise ValueError('not enough work!')
         
-        self.other_txs = other_txs if self.pow_hash <= self.header['bits'].target else None
+        if other_txs is not None and not self.pow_hash <= self.header['bits'].target:
+            raise ValueError('other_txs provided when not a block solution')
+        if other_txs is None and self.pow_hash <= self.header['bits'].target:
+            raise ValueError('other_txs not provided when a block solution')
+        
+        self.hash = bitcoin_data.hash256(share_type.pack(self.as_share()))
         
         # XXX eww
         self.time_seen = time.time()
-        self.peer = None
     
     def __repr__(self):
         return '<Share %s>' % (' '.join('%s=%r' % (k, getattr(self, k)) for k in self.__slots__),)
     
     def check(self, tracker):
-        if script.get_sigop_count(self.new_script) > 1:
-            raise ValueError('too many sigops!')
-        
-        share_info, gentx = generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.net)
+        share_info, gentx = generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.net)
         if share_info != self.share_info:
             raise ValueError('share difficulty invalid')
-        
-        if bitcoin_data.check_merkle_branch(bitcoin_data.tx_type.hash256(gentx), 0, self.merkle_branch) != self.header['merkle_root']:
-            raise ValueError('''gentx doesn't match header via merkle_branch''')
+        if bitcoin_data.hash256(bitcoin_data.tx_type.pack(gentx)) != self.gentx_hash:
+            raise ValueError('''gentx doesn't match hash_link''')
     
     def as_share(self):
-        if self.pow_hash > self.header['bits'].target: # share1a
-            return dict(type=0, contents=share1a_type.pack(self.as_share1a()))
-        elif self.pow_hash <= self.header['bits'].target: # share1b
-            return dict(type=1, contents=share1b_type.pack(self.as_share1b()))
-        else:
-            raise AssertionError()
-    
-    def as_share1a(self):
-        return dict(header=self.header, share_info=self.share_info, merkle_branch=self.merkle_branch)
-    
-    def as_share1b(self):
-        if self.other_txs is None:
-            raise ValueError('share does not contain all txs')
-        
-        return dict(header=self.header, share_info=self.share_info, other_txs=self.other_txs)
+        if not self.pow_hash <= self.header['bits'].target: # share1a
+            return dict(type=2, contents=share1a_type.pack(dict(min_header=self.min_header, share_info=self.share_info, hash_link=self.hash_link, merkle_branch=self.merkle_branch)))
+        else: # share1b
+            return dict(type=3, contents=share1b_type.pack(dict(min_header=self.min_header, share_info=self.share_info, hash_link=self.hash_link, other_txs=self.other_txs)))
     
     def as_block(self, tracker):
         if self.other_txs is None:
             raise ValueError('share does not contain all txs')
         
-        share_info, gentx = generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.net)
+        share_info, gentx = generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.net)
         assert share_info == self.share_info
         
         return dict(header=self.header, txs=[gentx] + self.other_txs)
 
-def get_pool_attempts_per_second(tracker, previous_share_hash, dist):
-    assert dist >= 2
-    near = tracker.shares[previous_share_hash]
-    far = tracker.shares[tracker.get_nth_parent_hash(previous_share_hash, dist - 1)]
-    attempts = tracker.get_work(near.hash) - tracker.get_work(far.hash)
-    time = near.timestamp - far.timestamp
-    if time <= 0:
-        time = 1
-    return attempts//time
-
-def get_average_stale_prop(tracker, share_hash, lookbehind):
-    def stales_per_share(share):
-        if share.share_data['stale_info'] == 253: # orphan
-            return 1
-        elif share.share_data['stale_info'] == 254: # doa
-            return 1
-        elif share.share_data['stale_info'] == 0:
-            return 0
-        elif share.share_data['stale_info'] == 255: # temporary hack until everyone uses new-style stale data
-            return 0
-        else:
-            return 1/(254/share.share_data['stale_info'] - 1) # converts stales/shares to stales/nonstales
-            # 0 and 254 case are taken care of above and this will soon be removed anyway
-    stales = sum(stales_per_share(share) for share in tracker.get_chain(share_hash, lookbehind))
-    return stales/(lookbehind + stales)
-
-def generate_transaction(tracker, share_data, block_target, desired_timestamp, net):
-    previous_share_hash = share_data['previous_share_hash']
-    new_script = share_data['new_script']
-    subsidy = share_data['subsidy']
-    donation = share_data['donation']
-    assert 0 <= donation <= 65535
-    
-    if len(share_data['coinbase']) > 100:
-        raise ValueError('coinbase too long!')
-    
-    previous_share = tracker.shares[previous_share_hash] if previous_share_hash is not None else None
-    
-    chain_length = getattr(net, 'REAL_CHAIN_LENGTH_FUNC', lambda _: net.REAL_CHAIN_LENGTH)(previous_share.timestamp if previous_share is not None else None)
-    
-    height, last = tracker.get_height_and_last(previous_share_hash)
-    assert height >= chain_length or last is None
-    if height < net.TARGET_LOOKBEHIND:
-        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(net.MAX_TARGET)
-    else:
-        attempts_per_second = get_pool_attempts_per_second(tracker, previous_share_hash, net.TARGET_LOOKBEHIND)
-        pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1
-        pre_target2 = math.clip(pre_target, (previous_share.target*9//10, previous_share.target*11//10))
-        pre_target3 = math.clip(pre_target2, (0, net.MAX_TARGET))
-        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
-    
-    attempts_to_block = bitcoin_data.target_to_average_attempts(block_target)
-    max_att = net.SPREAD * attempts_to_block
-    
-    this_att = min(bitcoin_data.target_to_average_attempts(bits.target), max_att)
-    other_weights, other_total_weight, other_donation_weight = tracker.get_cumulative_weights(previous_share_hash, min(height, chain_length), 65535*max(0, max_att - this_att))
-    assert other_total_weight == sum(other_weights.itervalues()) + other_donation_weight, (other_total_weight, sum(other_weights.itervalues()) + other_donation_weight)
-    weights, total_weight, donation_weight = math.add_dicts({new_script: this_att*(65535-donation)}, other_weights), this_att*65535 + other_total_weight, this_att*donation + other_donation_weight
-    assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
-    
-    SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
-    
-    # 1 satoshi is always donated so that a list of p2pool generated blocks can be easily found by looking at the donation address
-    amounts = dict((script, (subsidy-1)*(199*weight)//(200*total_weight)) for (script, weight) in weights.iteritems())
-    amounts[new_script] = amounts.get(new_script, 0) + (subsidy-1)//200
-    amounts[SCRIPT] = amounts.get(SCRIPT, 0) + (subsidy-1)*(199*donation_weight)//(200*total_weight)
-    amounts[SCRIPT] = amounts.get(SCRIPT, 0) + subsidy - sum(amounts.itervalues()) # collect any extra satoshis :P
-    
-    if sum(amounts.itervalues()) != subsidy:
-        raise ValueError()
-    if any(x < 0 for x in amounts.itervalues()):
-        raise ValueError()
-    
-    dests = sorted(amounts.iterkeys(), key=lambda script: (amounts[script], script))
-    dests = dests[-4000:] # block length limit, unlikely to ever be hit
-    
-    share_info = dict(
-        share_data=share_data,
-        bits=bits,
-        timestamp=math.clip(desired_timestamp, (previous_share.timestamp - 60, previous_share.timestamp + 60)) if previous_share is not None else desired_timestamp,
-    )
-    
-    return share_info, dict(
-        version=1,
-        tx_ins=[dict(
-            previous_output=None,
-            sequence=None,
-            script=share_data['coinbase'].ljust(2, '\x00'),
-        )],
-        tx_outs=[dict(value=0, script='\x20' + bitcoin_data.HashType().pack(share_info_type.hash256(share_info)))] + [dict(value=amounts[script], script=script) for script in dests if amounts[script]],
-        lock_time=0,
-    )
-
-
 class OkayTracker(forest.Tracker):
-    def __init__(self, net):
-        forest.Tracker.__init__(self)
+    def __init__(self, net, my_share_hashes, my_doa_share_hashes):
+        forest.Tracker.__init__(self, delta_type=forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
+            work=lambda share: bitcoin_data.target_to_average_attempts(share.target),
+            min_work=lambda share: bitcoin_data.target_to_average_attempts(share.max_target),
+        )))
         self.net = net
-        self.verified = forest.Tracker()
+        self.verified = forest.Tracker(delta_type=forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
+            work=lambda share: bitcoin_data.target_to_average_attempts(share.target),
+            my_count=lambda share: 1 if share.hash in my_share_hashes else 0,
+            my_doa_count=lambda share: 1 if share.hash in my_doa_share_hashes else 0,
+            my_orphan_announce_count=lambda share: 1 if share.hash in my_share_hashes and share.share_data['stale_info'] == 253 else 0,
+            my_dead_announce_count=lambda share: 1 if share.hash in my_share_hashes and share.share_data['stale_info'] == 254 else 0,
+        )))
         self.verified.get_nth_parent_hash = self.get_nth_parent_hash # self is a superset of self.verified
         
         self.get_cumulative_weights = skiplists.WeightsSkipList(self)
@@ -289,7 +297,7 @@ class OkayTracker(forest.Tracker):
             self.verified.add(share)
             return True
     
-    def think(self, ht, previous_block):
+    def think(self, block_rel_height_func, previous_block, bits):
         desired = set()
         
         # O(len(self.heads))
@@ -308,7 +316,12 @@ class OkayTracker(forest.Tracker):
                     bads.add(share.hash)
             else:
                 if last is not None:
-                    desired.add((self.shares[random.choice(list(self.reverse_shares[last]))].peer, last))
+                    desired.add((
+                        self.shares[random.choice(list(self.reverse_shares[last]))].peer,
+                        last,
+                        max(x.timestamp for x in self.get_chain(head, min(head_height, 5))),
+                        min(x.target for x in self.get_chain(head, min(head_height, 5))),
+                    ))
         for bad in bads:
             assert bad not in self.verified.shares
             assert bad in self.heads
@@ -329,41 +342,41 @@ class OkayTracker(forest.Tracker):
                 if not self.attempt_verify(share):
                     break
             if head_height < self.net.CHAIN_LENGTH and last_last_hash is not None:
-                desired.add((self.verified.shares[random.choice(list(self.verified.reverse_shares[last_hash]))].peer, last_last_hash))
-        if p2pool.DEBUG:
-            print len(self.verified.tails), "tails:"
-            for x in self.verified.tails:
-                print format_hash(x), self.score(max(self.verified.tails[x], key=self.verified.get_height), ht)
+                desired.add((
+                    self.shares[random.choice(list(self.verified.reverse_shares[last_hash]))].peer,
+                    last_last_hash,
+                    max(x.timestamp for x in self.get_chain(head, min(head_height, 5))),
+                    min(x.target for x in self.get_chain(head, min(head_height, 5))),
+                ))
         
         # decide best tree
-        best_tail = max(self.verified.tails, key=lambda h: self.score(max(self.verified.tails[h], key=self.verified.get_height), ht)) if self.verified.tails else None
+        decorated_tails = sorted((self.score(max(self.verified.tails[tail_hash], key=self.verified.get_height), block_rel_height_func), tail_hash) for tail_hash in self.verified.tails) # XXX using get_height here is quite possibly incorrect and vulnerable
+        if p2pool.DEBUG:
+            print len(decorated_tails), 'tails:'
+            for score, tail_hash in decorated_tails:
+                print format_hash(tail_hash), score
+        best_tail_score, best_tail = decorated_tails[-1] if decorated_tails else (None, None)
+        
         # decide best verified head
-        scores = sorted(self.verified.tails.get(best_tail, []), key=lambda h: (
+        decorated_heads = sorted(((
             self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
-            #self.verified.shares[h].peer is None,
-            ht.get_height_rel_highest(self.verified.shares[h].previous_block),
-            -self.verified.shares[h].time_seen
-        ))
-        
-        
+            #self.shares[h].peer is None,
+            self.shares[h].pow_hash <= self.shares[h].header['bits'].target, # is block solution
+            (self.shares[h].header['previous_block'], self.shares[h].header['bits']) == (previous_block, bits) or self.shares[h].peer is None,
+            -self.shares[h].time_seen,
+        ), h) for h in self.verified.tails.get(best_tail, []))
         if p2pool.DEBUG:
-            print len(self.verified.tails), "chain tails and", len(self.verified.tails.get(best_tail, [])), 'chain heads. Top 10 heads:'
-            if len(scores) > 10:
-                print '    ...'
-            for h in scores[-10:]:
-                print '   ', format_hash(h), format_hash(self.verified.shares[h].previous_hash), (
-                    self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
-                    self.verified.shares[h].peer is None,
-                    ht.get_height_rel_highest(self.verified.shares[h].previous_block),
-                    -self.verified.shares[h].time_seen
-                )
+            print len(decorated_heads), 'heads. Top 10:'
+            for score, head_hash in decorated_heads[-10:]:
+                print '   ', format_hash(head_hash), format_hash(self.shares[head_hash].previous_hash), score
+        best_head_score, best = decorated_heads[-1] if decorated_heads else (None, None)
         
         # eat away at heads
-        if scores:
+        if decorated_heads:
             for i in xrange(1000):
                 to_remove = set()
                 for share_hash, tail in self.heads.iteritems():
-                    if share_hash in scores[-5:]:
+                    if share_hash in [head_hash for score, head_hash in decorated_heads[-5:]]:
                         #print 1
                         continue
                     if self.shares[share_hash].time_seen > time.time() - 300:
@@ -406,28 +419,39 @@ class OkayTracker(forest.Tracker):
             #end = time.time()
             #print "removed! %i %f" % (len(to_remove), (end - start)/len(to_remove))
         
-        best = scores[-1] if scores else None
-        
         if best is not None:
-            best_share = self.verified.shares[best]
-            if ht.get_height_rel_highest(best_share.header['previous_block']) < ht.get_height_rel_highest(previous_block) and best_share.header_hash != previous_block and best_share.peer is not None:
+            best_share = self.shares[best]
+            if (best_share.header['previous_block'], best_share.header['bits']) != (previous_block, bits) and best_share.header_hash != previous_block and best_share.peer is not None:
                 if p2pool.DEBUG:
                     print 'Stale detected! %x < %x' % (best_share.header['previous_block'], previous_block)
                 best = best_share.previous_hash
+            
+            timestamp_cutoff = min(int(time.time()), best_share.timestamp) - 3600
+            target_cutoff = 2**256//(self.net.SHARE_PERIOD*best_tail_score[1] + 1) * 2 if best_tail_score[1] is not None else 2**256-1
+        else:
+            timestamp_cutoff = int(time.time()) - 24*60*60
+            target_cutoff = 2**256-1
+        
+        if p2pool.DEBUG:
+            print 'Desire %i shares. Cutoff: %s old diff>%.2f' % (len(desired), math.format_dt(time.time() - timestamp_cutoff), bitcoin_data.target_to_difficulty(target_cutoff))
+            for peer, hash, ts, targ in desired:
+                print '   ', '%s:%i' % peer.addr if peer is not None else None, format_hash(hash), math.format_dt(time.time() - ts), bitcoin_data.target_to_difficulty(targ), ts >= timestamp_cutoff, targ <= target_cutoff
         
-        return best, desired
+        return best, [(peer, hash) for peer, hash, ts, targ in desired if ts >= timestamp_cutoff and targ <= target_cutoff]
     
-    def score(self, share_hash, ht):
+    def score(self, share_hash, block_rel_height_func):
+        # returns approximate lower bound on chain's hashrate in the last self.net.CHAIN_LENGTH*15//16*self.net.SHARE_PERIOD time
+        
         head_height = self.verified.get_height(share_hash)
         if head_height < self.net.CHAIN_LENGTH:
             return head_height, None
         
         end_point = self.verified.get_nth_parent_hash(share_hash, self.net.CHAIN_LENGTH*15//16)
         
-        block_height = max(ht.get_height_rel_highest(share.header['previous_block']) for share in
+        block_height = max(block_rel_height_func(share.header['previous_block']) for share in
             self.verified.get_chain(end_point, self.net.CHAIN_LENGTH//16))
         
-        return self.net.CHAIN_LENGTH, (self.verified.get_work(share_hash) - self.verified.get_work(end_point))//(0 - block_height + 1)
+        return self.net.CHAIN_LENGTH, (self.verified.get_work(share_hash) - self.verified.get_work(end_point))//((0 - block_height + 1)*self.net.PARENT.BLOCK_PERIOD)
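# Rough reading of the second element (hypothetical numbers): work accumulated over the
# last CHAIN_LENGTH//16 shares, divided by the elapsed time implied by how many bitcoin
# blocks old that span is. block_rel_height_func appears to return heights relative to
# the current best block, so it is <= 0 for blocks in the past.
work_delta = 10**15       # attempts accumulated over the span
block_height = -12        # the span's oldest share built on a block 12 back from the tip
BLOCK_PERIOD = 600        # seconds per bitcoin block (mainnet)
print work_delta//((0 - block_height + 1)*BLOCK_PERIOD) # ~1.3e11 hashes/second lower bound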
 
 def format_hash(x):
     if x is None:
@@ -464,7 +488,10 @@ class ShareStore(object):
                             yield 'verified_hash', verified_hash
                             verified_hashes.add(verified_hash)
                         elif type_id == 5:
-                            share = Share.from_share(share_type.unpack(data_hex.decode('hex')), self.net)
+                            raw_share = share_type.unpack(data_hex.decode('hex'))
+                            if raw_share['type'] in [0, 1]:
+                                continue
+                            share = Share.from_share(raw_share, self.net, None)
                             yield 'share', share
                             share_hashes.add(share.hash)
                         else: