indentation and imports cleaned up
diff --git a/p2pool/data.py b/p2pool/data.py
index 2fed025..a463665 100644
--- a/p2pool/data.py
+++ b/p2pool/data.py
@@ -1,25 +1,15 @@
 from __future__ import division
 
 import itertools
+import random
+import time
 
-from bitcoin import data as bitcoin_data
+from twisted.python import log
 
-class CompressedList(bitcoin_data.Type):
-    def __init__(self, inner):
-        self.inner = inner
-    
-    def read(self, file):
-        values = bitcoin_data.ListType(self.inner).read(file)
-        if values != sorted(set(values)):
-            raise ValueError("invalid values")
-        references = bitcoin_data.ListType(bitcoin_data.VarIntType()).read(file)
-        return [values[reference] for reference in references]
-    
-    def write(self, file, item):
-        values = sorted(set(item))
-        values_map = dict((value, i) for i, value in enumerate(values))
-        bitcoin_data.ListType(self.inner).write(file, values)
-        bitcoin_data.ListType(bitcoin_data.VarIntType()).write(file, [values_map[subitem] for subitem in item])
+import p2pool
+from p2pool import skiplists
+from p2pool.bitcoin import data as bitcoin_data, script
+from p2pool.util import memoize, expiring_dict, math
 
 
 merkle_branch_type = bitcoin_data.ListType(bitcoin_data.ComposedType([
@@ -30,14 +20,13 @@ merkle_branch_type = bitcoin_data.ListType(bitcoin_data.ComposedType([
 
 share_data_type = bitcoin_data.ComposedType([
     ('previous_share_hash', bitcoin_data.PossiblyNone(0, bitcoin_data.HashType())),
-    ('previous_shares_hash', bitcoin_data.HashType()),
-    ('target2', bitcoin_data.FloatingIntegerType()),
+    ('target', bitcoin_data.FloatingIntegerType()),
     ('nonce', bitcoin_data.VarStrType()),
 ])
 
 
 coinbase_type = bitcoin_data.ComposedType([
-    ('identifier', bitcoin_data.StructType('<Q')),
+    ('identifier', bitcoin_data.FixedStrType(8)),
     ('share_data', share_data_type),
 ])
 
@@ -49,7 +38,7 @@ share_info_type = bitcoin_data.ComposedType([
 
 
 share1a_type = bitcoin_data.ComposedType([
-    ('header', bitcoin_data.block_header_type), # merkle_header not completely needed
+    ('header', bitcoin_data.block_header_type),
     ('share_info', share_info_type),
     ('merkle_branch', merkle_branch_type),
 ])
@@ -60,8 +49,6 @@ share1b_type = bitcoin_data.ComposedType([
     ('other_txs', bitcoin_data.ListType(bitcoin_data.tx_type)),
 ])
 
-shares_type = CompressedList(bitcoin_data.VarStrType())
-
 def calculate_merkle_branch(txs, index):
     hash_list = [(bitcoin_data.tx_type.hash256(tx), i == index, []) for i, tx in enumerate(txs)]
     
@@ -97,126 +84,172 @@ def gentx_to_share_info(gentx):
         new_script=gentx['tx_outs'][-1]['script'],
     )
 
-def share_info_to_gentx(share_info, chain, net):
+def share_info_to_gentx(share_info, block_target, tracker, net):
     return generate_transaction(
-        previous_share2=chain.share2s[share_info['share_data']['previous_share_hash']],
-        nonce=share_info['share_data']['nonce'],
+        tracker=tracker,
+        previous_share_hash=share_info['share_data']['previous_share_hash'],
         new_script=share_info['new_script'],
         subsidy=share_info['subsidy'],
+        nonce=share_info['share_data']['nonce'],
+        block_target=block_target,
         net=net,
     )
 
 class Share(object):
+    @classmethod
+    def from_block(cls, block):
+        return cls(block['header'], gentx_to_share_info(block['txs'][0]), other_txs=block['txs'][1:])
+    
+    @classmethod
+    def from_share1a(cls, share1a):
+        return cls(**share1a)
+    
+    @classmethod
+    def from_share1b(cls, share1b):
+        return cls(**share1b)
+    
+    __slots__ = 'header previous_block share_info merkle_branch other_txs timestamp share_data new_script subsidy previous_hash previous_share_hash target nonce bitcoin_hash hash time_seen shared peer'.split(' ')
+    
     def __init__(self, header, share_info, merkle_branch=None, other_txs=None):
         if merkle_branch is None and other_txs is None:
             raise ValueError('need either merkle_branch or other_txs')
+        if other_txs is not None:
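+            # recompute the branch from other_txs; the branch for index 0 never includes the gentx's own hash, so a dummy tx stands in for it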
+            new_merkle_branch = calculate_merkle_branch([dict(version=0, tx_ins=[], tx_outs=[], lock_time=0)] + other_txs, 0)
+            if merkle_branch is not None:
+                if merkle_branch != new_merkle_branch:
+                    raise ValueError('invalid merkle_branch and other_txs')
+            merkle_branch = new_merkle_branch
+        
+        if len(merkle_branch) > 16:
+            raise ValueError('merkle_branch too long!')
+        
         self.header = header
+        self.previous_block = header['previous_block']
         self.share_info = share_info
         self.merkle_branch = merkle_branch
         self.other_txs = other_txs
         
+        self.timestamp = self.header['timestamp']
+        
         self.share_data = self.share_info['share_data']
         self.new_script = self.share_info['new_script']
         self.subsidy = self.share_info['subsidy']
         
-        self.previous_share_hash = self.share_data['previous_share_hash']
-        self.previous_shares_hash = self.share_data['previous_shares_hash']
-        self.target2 = self.share_data['target2']
+        if len(self.new_script) > 100:
+            raise ValueError('new_script too long!')
         
-        self.hash = bitcoin_data.block_header_type.hash256(header)
-    
-    @classmethod
-    def from_block(cls, block):
-        return cls(block['header'], gentx_to_share_info(block['txs'][0]), other_txs=block['txs'][1:])
-    
-    @classmethod
-    def from_share1a(cls, share1a):
-        return cls(**share1a)
-    
-    @classmethod
-    def from_share1b(cls, share1b):
-        return cls(**share1b)
+        self.previous_hash = self.previous_share_hash = self.share_data['previous_share_hash']
+        self.target = self.share_data['target']
+        self.nonce = self.share_data['nonce']
+        
+        if len(self.nonce) > 100:
+            raise ValueError('nonce too long!')
+        
+        self.bitcoin_hash = bitcoin_data.block_header_type.hash256(header)
+        self.hash = share1a_type.hash256(self.as_share1a())
+        
+        if self.bitcoin_hash > self.target:
+            print 'hash', hex(self.bitcoin_hash)
+            print 'targ', hex(self.target)
+            raise ValueError('not enough work!')
+        
+        if script.get_sigop_count(self.new_script) > 1:
+            raise ValueError('too many sigops!')
+        
+        # XXX eww
+        self.time_seen = time.time()
+        self.shared = False
+        self.peer = None
     
-    def as_block(self):
-        if self.txs is None:
+    def as_block(self, tracker, net):
+        if self.other_txs is None:
             raise ValueError('share does not contain all txs')
         
-        return dict(header=self.header, txs=self.txs)
+        gentx = share_info_to_gentx(self.share_info, self.header['target'], tracker, net)
+        
+        return dict(header=self.header, txs=[gentx] + self.other_txs)
+    
+    def as_share1a(self):
+        return dict(header=self.header, share_info=self.share_info, merkle_branch=self.merkle_branch)
     
-    def as_share1(self):
-        return dict(header=self.header, gentx_info=self.gentx_info)
+    def as_share1b(self):
+        return dict(header=self.header, share_info=self.share_info, other_txs=self.other_txs)
     
-    def check(self, chain, height, previous_share2, net):
-        if self.chain_id_data != chain.chain_id_data:
-            raise ValueError('wrong chain')
+    def check(self, tracker, now, net):
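+        # timestamp sanity (cf. bitcoin's block timestamp rules): must be later than the median of the previous 11 shares and at most 2 hours in the future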
+        if self.previous_share_hash is not None:
+            if self.header['timestamp'] <= math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(self.previous_share_hash), 11)), use_float=False):
+                raise ValueError('share from too far in the past!')
         
-        if self.hash > net.TARGET_MULTIPLIER*bitcoin_data.bits_to_target(self.header['bits']):
-            raise ValueError('not enough work!')
+        if self.header['timestamp'] > now + 2*60*60:
+            raise ValueError('share from too far in the future!')
         
-        gentx, shares, merkle_root = gentx_info_to_gentx_shares_and_merkle_root(self.gentx_info, chain, net)
+        gentx = share_info_to_gentx(self.share_info, self.header['target'], tracker, net)
         
-        if merkle_root != self.header['merkle_root']:
-            raise ValueError("gentx doesn't match header")
+        if len(gentx['tx_ins'][0]['script']) > 100:
+            raise ValueError('''coinbase too large!''')
         
-        return Share2(self, shares, height)
-
-class Share2(object):
-    '''Share with associated data'''
-    
-    def __init__(self, share, shares, height):
-        self.share = share
-        self.shares = shares
-        self.height = height
+        if check_merkle_branch(gentx, self.merkle_branch) != self.header['merkle_root']:
+            raise ValueError('''gentx doesn't match header via merkle_branch''')
         
-        self.shared = False
+        if self.other_txs is not None:
+            if bitcoin_data.merkle_hash([gentx] + self.other_txs) != self.header['merkle_root']:
+                raise ValueError('''gentx doesn't match header via other_txs''')
+            
+            if len(bitcoin_data.block_type.pack(dict(header=self.header, txs=[gentx] + self.other_txs))) > 1000000 - 1000:
+                raise ValueError('''block size too large''')
     
     def flag_shared(self):
         self.shared = True
+    
+    def __repr__(self):
+        return '<Share %s>' % (' '.join('%s=%r' % (k, getattr(self, k)) for k in self.__slots__),) # __slots__ means instances have no __dict__
+
+def get_pool_attempts_per_second(tracker, previous_share_hash, net, dist=None):
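+    # estimate the pool's hashrate: work accumulated between the previous share and its (dist-1)th ancestor, divided by the elapsed time between their timestamps (at least 1 second)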
+    if dist is None:
+        dist = net.TARGET_LOOKBEHIND
+    near = tracker.shares[previous_share_hash]
+    far = tracker.shares[tracker.get_nth_parent_hash(previous_share_hash, dist - 1)]
+    attempts = tracker.get_work(near.hash) - tracker.get_work(far.hash)
+    time = near.timestamp - far.timestamp
+    if time == 0:
+        time = 1
+    return attempts//time
 
 def generate_transaction(tracker, previous_share_hash, new_script, subsidy, nonce, block_target, net):
-    previous_share2 = tracker.shares[previous_share_hash] if previous_share_hash is not None else None
-    #previous_share2 = chain.shares
-    #previous_shares
-    #shares = 
-    #shares = (previous_share2.shares if previous_share2 is not None else [net.SCRIPT]*net.SPREAD)[1:-1] + [new_script, new_script]
-    
-    chain = list(itertools.islice(tracker.get_chain(previous_share_hash), net.CHAIN_LENGTH))
-    if len(chain) < 100:
-        target2 = bitcoin_data.FloatingIntegerType().truncate_to(2**256//2**32 - 1)
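+    # share retargeting: with less than TARGET_LOOKBEHIND shares of history, use a fixed bootstrap target;
+    # otherwise aim for one share per SHARE_PERIOD seconds at the estimated pool rate, clamped to within
+    # 10% of the previous share's target and capped at net.MAX_TARGET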
+    height, last = tracker.get_height_and_last(previous_share_hash)
+    if height < net.TARGET_LOOKBEHIND:
+        target = bitcoin_data.FloatingIntegerType().truncate_to(2**256//2**20 - 1)
     else:
-        attempts_per_second = sum(bitcoin_data.target_to_average_attempts(share.target) for share in itertools.islice(chain, 0, max(0, len(chain) - 1)))//(chain[0].timestamp - chain[-1].timestamp)
-        pre_target = 2**256*net.SHARE_PERIOD//attempts_per_second
-        pre_target2 = math.clip(pre_target, (previous_share2.target*9//10, previous_share2.target*11//10))
-        pre_target3 = math.clip(pre_target2, (0, 2**256//2**32 - 1))
-        target2 = bitcoin_data.FloatingIntegerType().truncate_to(pre_target3)
-    
+        attempts_per_second = get_pool_attempts_per_second(tracker, previous_share_hash, net)
+        pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1
+        previous_share = tracker.shares[previous_share_hash] if previous_share_hash is not None else None
+        pre_target2 = math.clip(pre_target, (previous_share.target*9//10, previous_share.target*11//10))
+        pre_target3 = math.clip(pre_target2, (0, net.MAX_TARGET))
+        target = bitcoin_data.FloatingIntegerType().truncate_to(pre_target3)
     
     attempts_to_block = bitcoin_data.target_to_average_attempts(block_target)
-    total_weight = 0
-    
-    class fake_share(object):
-        script = new_script
-        share = dict(target=target2)
+    max_weight = net.SPREAD * attempts_to_block
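+    # past shares are rewarded over a window of at most SPREAD blocks' worth of expected attempts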
     
-    dest_weights = {}
-    for share in itertools.chain([fake_share], itertools.islice(tracker.get_chain(previous_share_hash), net.CHAIN_LENGTH)):
-        weight = bitcoin_data.target_to_average_attempts(share.share['target'])
-        weight = max(weight, attempts_to_block - total_weight)
-        
-        dest_weights[share.script] = dest_weights.get(share.script, 0) + weight
-        total_weight += weight
-        
-        if total_weight == attempts_to_block:
-            break
+    this_weight = min(bitcoin_data.target_to_average_attempts(target), max_weight)
+    other_weights, other_weights_total = tracker.get_cumulative_weights(previous_share_hash, min(height, net.CHAIN_LENGTH), max(0, max_weight - this_weight))
+    dest_weights, total_weight = math.add_dicts([{new_script: this_weight}, other_weights]), this_weight + other_weights_total
+    assert total_weight == sum(dest_weights.itervalues())
     
-    amounts = dict((script, subsidy*(199*weight)//(200*total_weight)) for (script, weight) in dest_weights.iteritems())
-    amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy*1//200 # prevent fake previous p2pool blocks
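+    # split 99% (396/400) of the subsidy pro rata by weight; give 0.5% to this share's script and 0.5% (plus any rounding remainder) to net.SCRIPT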
+    amounts = dict((script, subsidy*(396*weight)//(400*total_weight)) for (script, weight) in dest_weights.iteritems())
+    amounts[new_script] = amounts.get(new_script, 0) + subsidy*2//400
+    amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy*2//400
     amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy - sum(amounts.itervalues()) # collect any extra
+    if sum(amounts.itervalues()) != subsidy:
+        raise ValueError()
+    if any(x < 0 for x in amounts.itervalues()):
+        raise ValueError()
     
-    dests = sorted(amounts.iterkeys(), key=lambda script: (script == new_script, script))
-    assert dests[-1] == new_script, dests
+    pre_dests = sorted(amounts.iterkeys(), key=lambda script: (amounts[script], script))
+    pre_dests = pre_dests[-4000:] # block length limit, unlikely to ever be hit
     
-    previous_shares = [] # XXX
+    dests = sorted(pre_dests, key=lambda script: (script == new_script, script))
+    assert dests[-1] == new_script
     
     return dict(
         version=1,
@@ -227,9 +260,8 @@ def generate_transaction(tracker, previous_share_hash, new_script, subsidy, nonc
                 identifier=net.IDENTIFIER,
                 share_data=dict(
                     previous_share_hash=previous_share_hash,
-                    previous_shares_hash=shares_type.hash256(previous_shares),
                     nonce=nonce,
-                    target2=target2,
+                    target=target,
                 ),
             )),
         )],
@@ -238,112 +270,188 @@ def generate_transaction(tracker, previous_share_hash, new_script, subsidy, nonc
     )
 
 
-class Tracker(object):
-    def __init__(self):
-        self.shares = {} # hash -> share
-        self.reverse_shares = {} # previous_share_hash -> share_hash
-        self.heads = {} # hash -> (height, tail hash)
-        self.heads = set()
-    
-    def add_share(self, share):
-        if share.hash in self.shares:
-            return # XXX raise exception?
-        
-        self.shares[share.hash] = share
-        self.reverse_shares.setdefault(share.previous_share_hash, set()).add(share.hash)
+class OkayTracker(bitcoin_data.Tracker):
+    def __init__(self, net):
+        bitcoin_data.Tracker.__init__(self)
+        self.net = net
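+        # sub-tracker holding only shares that have passed full validation (see attempt_verify)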
+        self.verified = bitcoin_data.Tracker()
         
-        if self.reverse_shares.get(share.hash, set()):
-            pass # not a head
-        else:
-            self.heads.add(share.hash)
-            if share.previous_share_hash in self.heads:
-                self.heads.remove(share.previous_share_hash)
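+        # skip list used by generate_transaction to sum payout weights along the share chain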
+        self.get_cumulative_weights = skiplists.WeightsSkipList(self)
     
-    def get_chain(self, start):
-        share_hash_to_get = start
-        while share_hash_to_get in self.shares:
-            share = self.shares[share_hash_to_get]
-            yield share
-            share_hash_to_get = share.previous_share_hash
+    def add(self, share, known_verified=False):
+        bitcoin_data.Tracker.add(self, share)
+        if known_verified:
+            self.verified.add(share)
     
-    def get_best_share_hash(self):
-        if not self.heads:
-            return None
-        return max(self.heads, key=self.score_chain)
+    def attempt_verify(self, share, now):
+        if share.hash in self.verified.shares:
+            return True
+        height, last = self.get_height_and_last(share.hash)
+        if height < self.net.CHAIN_LENGTH and last is not None:
+            raise AssertionError()
+        try:
+            share.check(self, now, self.net)
+        except:
+            log.err(None, 'Share check failed:')
+            return False
+        else:
+            self.verified.add(share)
+            return True
     
-    def score_chain(self, start):
-        length = len(self.get_chain(start))
+    def think(self, ht, previous_block, now):
+        desired = set()
         
-        score = 0
-        for share in itertools.islice(self.get_chain(start), self.net.CHAIN_LENGTH):
-            score += a
+        # O(len(self.heads))
+        #   make 'unverified heads' set?
+        # for each overall head, attempt verification
+        # if it fails, attempt on parent, and repeat
+        # if no successful verification because of lack of parents, request parent
+        bads = set()
+        for head in set(self.heads) - set(self.verified.heads):
+            head_height, last = self.get_height_and_last(head)
+            
+            for share in itertools.islice(self.get_chain_known(head), None if last is None else min(5, max(0, head_height - self.net.CHAIN_LENGTH))):
+                if self.attempt_verify(share, now):
+                    break
+                if share.hash in self.heads:
+                    bads.add(share.hash)
+            else:
+                if last is not None:
+                    desired.add((self.shares[random.choice(list(self.reverse_shares[last]))].peer, last))
+        for bad in bads:
+            assert bad not in self.verified.shares
+            assert bad in self.heads
+            self.remove(bad)
         
-        return (min(length, 1000), score)
-
-class OkayTracker(Tracker):
-    def __init__(self):
-        Tracker.__init__(self)
-        self.okay_cache = set()
-    def is_okay(self, start):
-        '''
-        Returns:
-            {'result': 'okay', verified_height: ...} # if share has an okay parent or if share has CHAIN_LENGTH children and CHAIN_LENTH parents that it verified with
-            {'result': 'needs_parent', 'parent_hash': ...} # if share doesn't have CHAIN_LENGTH parents
-            {'result': 'needs_share_shares', 'share_hash': ...} # if share has CHAIN_LENGTH children and needs its shares to 
-            {'result': 'not_okay'} # if the share has a not okay parent or if the share has an okay parent and failed validation
-        '''
+        # try to get at least CHAIN_LENGTH height for each verified head, requesting parents if needed
+        for head in list(self.verified.heads):
+            head_height, last_hash = self.verified.get_height_and_last(head)
+            last_height, last_last_hash = self.get_height_and_last(last_hash)
+            # XXX review boundary conditions
+            want = max(self.net.CHAIN_LENGTH - head_height, 0)
+            can = max(last_height - 1 - self.net.CHAIN_LENGTH, 0) if last_last_hash is not None else last_height
+            get = min(want, can)
+            #print 'Z', head_height, last_hash is None, last_height, last_last_hash is None, want, can, get
+            for share in itertools.islice(self.get_chain_known(last_hash), get):
+                if not self.attempt_verify(share, now):
+                    break
+            if head_height < self.net.CHAIN_LENGTH and last_last_hash is not None:
+                desired.add((self.verified.shares[random.choice(list(self.verified.reverse_shares[last_hash]))].peer, last_last_hash))
         
-        length = len
-        to_end_rev = []
-        for share in itertools.islice(self.get_chain(start), self.net.CHAIN_LENGTH):
-            if share in self.okay_cache:
-                return validate(share, to_end_rev[::-1])
-            to_end_rev.append(share)
-        # picking up last share from for loop, ew
-        self.okay_cache.add(share)
-        return validate(share, to_end_rev[::-1])
-class Chain(object):
-    def __init__(self):
-        pass
-
-def get_chain_descriptor(tracker, start):
-    for item in tracker.get_chain(self.net.CHAIN_LENGTH):
-        a
-    pass
-
-if __name__ == '__main__':
-    class FakeShare(object):
-        def __init__(self, hash, previous_share_hash):
-            self.hash = hash
-            self.previous_share_hash = previous_share_hash
-    
-    t = Tracker()
+        # decide best tree
+        best_tail = max(self.verified.tails, key=lambda h: self.score(max(self.verified.tails[h], key=self.verified.get_height), ht)) if self.verified.tails else None
+        # decide best verified head
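+        #  (ascending sort; scores[-1] is best: most work up to ~5 shares back, newest referenced bitcoin block, locally generated, seen earliest)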
+        scores = sorted(self.verified.tails.get(best_tail, []), key=lambda h: (
+            self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
+            ht.get_min_height(self.verified.shares[h].previous_block),
+            self.verified.shares[h].peer is None,
+            -self.verified.shares[h].time_seen
+        ))
+
+        if p2pool.DEBUG:
+            print len(self.verified.tails.get(best_tail, []))
+            for h in scores:
+                print '   ', format_hash(h), format_hash(self.verified.shares[h].previous_hash), (
+                    self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
+                    ht.get_min_height(self.verified.shares[h].previous_block),
+                    self.verified.shares[h].peer is None,
+                    -self.verified.shares[h].time_seen
+                )
+        
+        # eat away at heads
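+        #  (keep the 5 best-scoring heads and anything seen recently; repeatedly remove the rest)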
+        if scores:
+            while True:
+                to_remove = set()
+                for share_hash, tail in self.heads.iteritems():
+                    if share_hash in scores[-5:]:
+                        continue
+                    if self.shares[share_hash].time_seen > time.time() - 30:
+                        continue
+                    if max(self.shares[before_tail_hash].time_seen for before_tail_hash in self.reverse_shares.get(tail)) > time.time() - 120:
+                        continue
+                    to_remove.add(share_hash)
+                for share_hash in to_remove:
+                    self.remove(share_hash)
+                    if share_hash in self.verified.shares:
+                        self.verified.remove(share_hash)
+                if not to_remove:
+                    break
+        
+        # drop tails
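+        #  (once every head above a tail is at least 2*CHAIN_LENGTH + 10 shares high, the shares just past that tail are no longer needed)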
+        while True:
+            removed = False
+            # if removed from this, it must be removed from verified
+            for tail, heads in list(self.tails.iteritems()):
+                if min(self.get_height(head) for head in heads) < 2*self.net.CHAIN_LENGTH + 10:
+                    continue
+                start = time.time()
+                for aftertail in list(self.reverse_shares.get(tail, set())):
+                    self.remove(aftertail)
+                    if aftertail in self.verified.shares:
+                        self.verified.remove(aftertail)
+                    removed = True
+                end = time.time()
+                print "removed! %x %f" % (tail, end - start)
+            if not removed:
+                break
+        
+        best = scores[-1] if scores else None
+        
+        if best is not None:
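+            # if the best share builds on an older bitcoin block than the one we're working on, didn't itself become that block, and isn't our own, treat it as stale and build on its parent instead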
+            best_share = self.verified.shares[best]
+            if ht.get_min_height(best_share.header['previous_block']) < ht.get_min_height(previous_block) and best_share.bitcoin_hash != previous_block and best_share.peer is not None:
+                if p2pool.DEBUG:
+                    print 'Stale detected!'
+                best = best_share.previous_hash
+        
+        return best, desired
     
-    t.add_share(FakeShare(1, 2))
-    print t.heads
-    t.add_share(FakeShare(4, 0))
-    print t.heads
-    t.add_share(FakeShare(3, 4))
-    print t.heads
+    @memoize.memoize_with_backing(expiring_dict.ExpiringDict(5, get_touches=False))
+    def score(self, share_hash, ht):
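+        # rank a verified chain: (height capped at CHAIN_LENGTH, best ratio of accumulated share work to how far behind the current bitcoin height the shares' referenced blocks fall)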
+        head_height, last = self.verified.get_height_and_last(share_hash)
+        score2 = 0
+        attempts = 0
+        max_height = 0
+        share2_hash = self.verified.get_nth_parent_hash(share_hash, min(self.net.CHAIN_LENGTH//2, head_height//2)) if last is not None else share_hash
+        for share in reversed(list(itertools.islice(self.verified.get_chain_known(share2_hash), self.net.CHAIN_LENGTH))):
+            max_height = max(max_height, ht.get_min_height(share.header['previous_block']))
+            attempts += bitcoin_data.target_to_average_attempts(share.target)
+            this_score = attempts//(ht.get_highest_height() - max_height + 1)
+            if this_score > score2:
+                score2 = this_score
+        return min(head_height, self.net.CHAIN_LENGTH), score2
+
+def format_hash(x):
+    if x is None:
+        return 'xxxxxxxx'
+    return '%08x' % (x % 2**32)
 
 class Mainnet(bitcoin_data.Mainnet):
     SHARE_PERIOD = 5 # seconds
-    CHAIN_LENGTH = 1000 # shares
-    SPREAD = 10 # blocks
-    ROOT_BLOCK = 0x6c9cb0589a44808d9a9361266a4ffb9fea2e2cf4d70bb2118b5
+    CHAIN_LENGTH = 24*60*60//5 # shares
+    TARGET_LOOKBEHIND = 200 # shares
+    SPREAD = 3 # blocks
     SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
-    IDENTIFIER = 0x7452839666e1f8f8
-    PREFIX = '2d4224bf18c87b87'.decode('hex')
+    IDENTIFIER = 'fc70035c7a81bc6f'.decode('hex')
+    PREFIX = '2472ef181efcd37b'.decode('hex')
     ADDRS_TABLE = 'addrs'
     P2P_PORT = 9333
+    MAX_TARGET = 2**256//2**32 - 1
+    PERSIST = True
+    HEADERSTORE_FILENAME = 'headers.dat'
 
 class Testnet(bitcoin_data.Testnet):
-    SHARE_PERIOD = 5 # seconds
-    CHAIN_LENGTH = 1000 # shares
-    SPREAD = 10 # blocks
-    ROOT_BLOCK = 0xd5070cd4f2987ad2191af71393731a2b143f094f7b84c9e6aa9a6a
+    SHARE_PERIOD = 1 # seconds
+    CHAIN_LENGTH = 24*60*60//5 # shares
+    TARGET_LOOKBEHIND = 200 # shares
+    SPREAD = 3 # blocks
     SCRIPT = '410403ad3dee8ab3d8a9ce5dd2abfbe7364ccd9413df1d279bf1a207849310465b0956e5904b1155ecd17574778f9949589ebfd4fb33ce837c241474a225cf08d85dac'.decode('hex')
-    IDENTIFIER = 0x1ae3479e4eb6700a
-    PREFIX = 'd19778c812754854'.decode('hex')
+    IDENTIFIER = '5fc2be2d4f0d6bfb'.decode('hex')
+    PREFIX = '3f6057a15036f441'.decode('hex')
     ADDRS_TABLE = 'addrs_testnet'
     P2P_PORT = 19333
+    MAX_TARGET = 2**256//2**20 - 1
+    PERSIST = False
+    HEADERSTORE_FILENAME = 'testnet_headers.dat'