unserialize bits to new FloatingInteger type to get around non-canonical compressed...
[p2pool.git] / p2pool / data.py
from __future__ import division

import itertools
import random
import time

from twisted.python import log

import p2pool
from p2pool import skiplists
from p2pool.bitcoin import data as bitcoin_data, script
from p2pool.util import memoize, expiring_dict, math


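# Wire formats for p2pool's share messages. A merkle branch is the list of
# sibling hashes, together with which side each sibling sits on, needed to
# recompute a block's merkle root starting from a single transaction.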
merkle_branch_type = bitcoin_data.ListType(bitcoin_data.ComposedType([
    ('side', bitcoin_data.StructType('<B')), # enum?
    ('hash', bitcoin_data.HashType()),
]))


share_data_type = bitcoin_data.ComposedType([
    ('previous_share_hash', bitcoin_data.PossiblyNone(0, bitcoin_data.HashType())),
    ('target', bitcoin_data.FloatingIntegerType()),
    ('nonce', bitcoin_data.VarStrType()),
])


coinbase_type = bitcoin_data.ComposedType([
    ('identifier', bitcoin_data.FixedStrType(8)),
    ('share_data', share_data_type),
])

share_info_type = bitcoin_data.ComposedType([
    ('share_data', share_data_type),
    ('new_script', bitcoin_data.VarStrType()),
    ('subsidy', bitcoin_data.StructType('<Q')),
])


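# A share is relayed in one of two forms: share1a carries just the merkle
# branch linking the generation transaction to the header, while share1b
# (used when the share also satisfies the block target) carries the full
# list of other transactions so the block can be reassembled.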
share1a_type = bitcoin_data.ComposedType([
    ('header', bitcoin_data.block_header_type),
    ('share_info', share_info_type),
    ('merkle_branch', merkle_branch_type),
])

share1b_type = bitcoin_data.ComposedType([
    ('header', bitcoin_data.block_header_type),
    ('share_info', share_info_type),
    ('other_txs', bitcoin_data.ListType(bitcoin_data.tx_type)),
])

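# Builds the merkle branch for txs[index]: hashes are paired level by level
# (duplicating the last hash when a level has an odd count, as Bitcoin does),
# and at every level the sibling of the flagged element is recorded along
# with its side, so the root can later be recomputed from that one tx.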
def calculate_merkle_branch(txs, index):
    hash_list = [(bitcoin_data.tx_type.hash256(tx), i == index, []) for i, tx in enumerate(txs)]
    
    while len(hash_list) > 1:
        hash_list = [
            (
                bitcoin_data.merkle_record_type.hash256(dict(left=left, right=right)),
                left_f or right_f,
                (left_l if left_f else right_l) + [dict(side=1, hash=right) if left_f else dict(side=0, hash=left)],
            )
            for (left, left_f, left_l), (right, right_f, right_l) in
                zip(hash_list[::2], hash_list[1::2] + [hash_list[::2][-1]])
        ]
    
    assert hash_list[0][1]
    assert check_merkle_branch(txs[index], hash_list[0][2]) == hash_list[0][0]
    
    return hash_list[0][2]

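# Replays a merkle branch: starting from the transaction's hash, combine with
# each recorded sibling on its recorded side to arrive back at the merkle root.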
def check_merkle_branch(tx, branch):
    hash_ = bitcoin_data.tx_type.hash256(tx)
    for step in branch:
        if not step['side']:
            hash_ = bitcoin_data.merkle_record_type.hash256(dict(left=step['hash'], right=hash_))
        else:
            hash_ = bitcoin_data.merkle_record_type.hash256(dict(left=hash_, right=step['hash']))
    return hash_

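# Conversions between a generation transaction and the share_info that
# describes it: share_data is packed into the coinbase script, the subsidy is
# the sum of the outputs, and new_script is the last output's script.
# share_info_to_gentx rebuilds the full transaction via generate_transaction.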
def gentx_to_share_info(gentx):
    return dict(
        share_data=coinbase_type.unpack(gentx['tx_ins'][0]['script'])['share_data'],
        subsidy=sum(tx_out['value'] for tx_out in gentx['tx_outs']),
        new_script=gentx['tx_outs'][-1]['script'],
    )

def share_info_to_gentx(share_info, block_target, tracker, net):
    return generate_transaction(
        tracker=tracker,
        previous_share_hash=share_info['share_data']['previous_share_hash'],
        new_script=share_info['new_script'],
        subsidy=share_info['subsidy'],
        nonce=share_info['share_data']['nonce'],
        block_target=block_target,
        net=net,
    )

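# A single p2pool share: a bitcoin block header whose generation transaction
# commits to share_data, plus enough information (a merkle_branch or the full
# transaction list) to tie that generation transaction to the header.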
class Share(object):
    @classmethod
    def from_block(cls, block):
        return cls(block['header'], gentx_to_share_info(block['txs'][0]), other_txs=block['txs'][1:])
    
    @classmethod
    def from_share1a(cls, share1a):
        return cls(**share1a)
    
    @classmethod
    def from_share1b(cls, share1b):
        return cls(**share1b)
    
    __slots__ = 'header previous_block share_info merkle_branch other_txs timestamp share_data new_script subsidy previous_hash previous_share_hash target nonce bitcoin_hash hash time_seen shared stored peer'.split(' ')
    
    def __init__(self, header, share_info, merkle_branch=None, other_txs=None):
        if merkle_branch is None and other_txs is None:
            raise ValueError('need either merkle_branch or other_txs')
        if other_txs is not None:
            new_merkle_branch = calculate_merkle_branch([dict(version=0, tx_ins=[], tx_outs=[], lock_time=0)] + other_txs, 0)
            if merkle_branch is not None:
                if merkle_branch != new_merkle_branch:
                    raise ValueError('invalid merkle_branch and other_txs')
            merkle_branch = new_merkle_branch
        
        if len(merkle_branch) > 16:
            raise ValueError('merkle_branch too long!')
        
        self.header = header
        self.previous_block = header['previous_block']
        self.share_info = share_info
        self.merkle_branch = merkle_branch
        self.other_txs = other_txs
        
        self.timestamp = self.header['timestamp']
        
        self.share_data = self.share_info['share_data']
        self.new_script = self.share_info['new_script']
        self.subsidy = self.share_info['subsidy']
        
        if len(self.new_script) > 100:
            raise ValueError('new_script too long!')
        
        self.previous_hash = self.previous_share_hash = self.share_data['previous_share_hash']
        self.target = self.share_data['target']
        self.nonce = self.share_data['nonce']
        
        if len(self.nonce) > 100:
            raise ValueError('nonce too long!')
        
        self.bitcoin_hash = bitcoin_data.block_header_type.hash256(header)
        self.hash = share1a_type.hash256(self.as_share1a())
        
        if self.bitcoin_hash > self.target:
            print 'hash', hex(self.bitcoin_hash)
            print 'targ', hex(self.target)
            raise ValueError('not enough work!')
        
        if script.get_sigop_count(self.new_script) > 1:
            raise ValueError('too many sigops!')
        
        # XXX eww
        self.time_seen = time.time()
        self.shared = False
        self.stored = False
        self.peer = None
    
    def as_block(self, tracker, net):
        if self.other_txs is None:
            raise ValueError('share does not contain all txs')
        
        gentx = share_info_to_gentx(self.share_info, self.header['target'], tracker, net)
        
        return dict(header=self.header, txs=[gentx] + self.other_txs)
    
    def as_share1a(self):
        return dict(header=self.header, share_info=self.share_info, merkle_branch=self.merkle_branch)
    
    def as_share1b(self):
        return dict(header=self.header, share_info=self.share_info, other_txs=self.other_txs)
    
    def check(self, tracker, now, net):
        import time
        if self.previous_share_hash is not None:
            if self.header['timestamp'] <= math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(self.previous_share_hash), 11)), use_float=False):
                raise ValueError('share from too far in the past!')
        
        if self.header['timestamp'] > now + 2*60*60:
            raise ValueError('share from too far in the future!')
        
        gentx = share_info_to_gentx(self.share_info, self.header['target'], tracker, net)
        
        if len(gentx['tx_ins'][0]['script']) > 100:
            raise ValueError('''coinbase too large!''')
        
        if check_merkle_branch(gentx, self.merkle_branch) != self.header['merkle_root']:
            raise ValueError('''gentx doesn't match header via merkle_branch''')
        
        if self.other_txs is not None:
            if bitcoin_data.merkle_hash([gentx] + self.other_txs) != self.header['merkle_root']:
                raise ValueError('''gentx doesn't match header via other_txs''')
            
            if len(bitcoin_data.block_type.pack(dict(header=self.header, txs=[gentx] + self.other_txs))) > 1000000 - 1000:
                raise ValueError('''block size too large''')
    
    def flag_shared(self):
        self.shared = True
    
    def __repr__(self):
        # Share defines __slots__, so instances have no __dict__; iterate the slots instead.
        return '<Share %s>' % (' '.join('%s=%r' % (k, getattr(self, k)) for k in self.__slots__),)

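# Estimates the pool's hashrate around previous_share_hash: the total work
# between that share and its ancestor `dist` shares back, divided by the
# elapsed time between their timestamps (clamped to at least one second).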
def get_pool_attempts_per_second(tracker, previous_share_hash, net, dist=None):
    if dist is None:
        dist = net.TARGET_LOOKBEHIND
    near = tracker.shares[previous_share_hash]
    far = tracker.shares[tracker.get_nth_parent_hash(previous_share_hash, dist - 1)]
    attempts = tracker.get_work(near.hash) - tracker.get_work(far.hash)
    time = near.timestamp - far.timestamp
    if time == 0:
        time = 1
    return attempts//time

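# Builds the generation (coinbase) transaction for a new share. The share
# target is retargeted toward one share per SHARE_PERIOD seconds, clamped to
# within 90%-110% of the previous share's target and to MAX_TARGET. The
# subsidy is then split across recent shares in proportion to their work
# (bounded by SPREAD times the expected work for a block), with 99%
# distributed by weight, 0.5% to the share's own new_script, and 0.5% plus
# any rounding remainder to net.SCRIPT.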
def generate_transaction(tracker, previous_share_hash, new_script, subsidy, nonce, block_target, net):
    height, last = tracker.get_height_and_last(previous_share_hash)
    assert height >= net.CHAIN_LENGTH or last is None
    if height < net.TARGET_LOOKBEHIND:
        target = bitcoin_data.FloatingInteger.from_target_upper_bound(net.MAX_TARGET)
    else:
        attempts_per_second = get_pool_attempts_per_second(tracker, previous_share_hash, net)
        pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1
        previous_share = tracker.shares[previous_share_hash] if previous_share_hash is not None else None
        pre_target2 = math.clip(pre_target, (previous_share.target*9//10, previous_share.target*11//10))
        pre_target3 = math.clip(pre_target2, (0, net.MAX_TARGET))
        target = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
    
    attempts_to_block = bitcoin_data.target_to_average_attempts(block_target)
    max_weight = net.SPREAD * attempts_to_block
    
    this_weight = min(bitcoin_data.target_to_average_attempts(target), max_weight)
    other_weights, other_weights_total = tracker.get_cumulative_weights(previous_share_hash, min(height, net.CHAIN_LENGTH), max(0, max_weight - this_weight))
    dest_weights, total_weight = math.add_dicts([{new_script: this_weight}, other_weights]), this_weight + other_weights_total
    assert total_weight == sum(dest_weights.itervalues())
    
    amounts = dict((script, subsidy*(396*weight)//(400*total_weight)) for (script, weight) in dest_weights.iteritems())
    amounts[new_script] = amounts.get(new_script, 0) + subsidy*2//400
    amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy*2//400
    amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy - sum(amounts.itervalues()) # collect any extra
    if sum(amounts.itervalues()) != subsidy:
        raise ValueError()
    if any(x < 0 for x in amounts.itervalues()):
        raise ValueError()
    
    pre_dests = sorted(amounts.iterkeys(), key=lambda script: (amounts[script], script))
    pre_dests = pre_dests[-4000:] # block length limit, unlikely to ever be hit
    
    dests = sorted(pre_dests, key=lambda script: (script == new_script, script))
    assert dests[-1] == new_script
    
    return dict(
        version=1,
        tx_ins=[dict(
            previous_output=None,
            sequence=None,
            script=coinbase_type.pack(dict(
                identifier=net.IDENTIFIER,
                share_data=dict(
                    previous_share_hash=previous_share_hash,
                    nonce=nonce,
                    target=target,
                ),
            )),
        )],
        tx_outs=[dict(value=amounts[script], script=script) for script in dests if amounts[script]],
        lock_time=0,
    )


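# Tracks the share DAG. `verified` is a second Tracker holding only shares
# whose check() has passed; it borrows get_nth_parent_hash from the full
# tracker, since every verified share is also present in the main set.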
class OkayTracker(bitcoin_data.Tracker):
    def __init__(self, net):
        bitcoin_data.Tracker.__init__(self)
        self.net = net
        self.verified = bitcoin_data.Tracker()
        self.verified.get_nth_parent_hash = self.get_nth_parent_hash # self is a superset of self.verified
        
        self.get_cumulative_weights = skiplists.WeightsSkipList(self)
    
    def add(self, share, known_verified=False):
        bitcoin_data.Tracker.add(self, share)
        if known_verified:
            self.verified.add(share)
    
    def attempt_verify(self, share, now):
        if share.hash in self.verified.shares:
            return True
        height, last = self.get_height_and_last(share.hash)
        if height < self.net.CHAIN_LENGTH + 1 and last is not None:
            raise AssertionError()
        try:
            share.check(self, now, self.net)
        except:
            log.err(None, 'Share check failed:')
            return False
        else:
            self.verified.add(share)
            return True
    
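    # One maintenance pass: try to verify shares hanging off unverified heads,
    # extend verified chains toward CHAIN_LENGTH (collecting (peer, hash) pairs
    # to request in `desired`), prune stale heads and over-long tails, and pick
    # the best verified head to build on.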
    def think(self, ht, previous_block, now):
        desired = set()
        
        # O(len(self.heads))
        #   make 'unverified heads' set?
        # for each overall head, attempt verification
        # if it fails, attempt on parent, and repeat
        # if no successful verification because of lack of parents, request parent
        bads = set()
        for head in set(self.heads) - set(self.verified.heads):
            head_height, last = self.get_height_and_last(head)
            
            for share in itertools.islice(self.get_chain_known(head), None if last is None else min(5, max(0, head_height - self.net.CHAIN_LENGTH))):
                if self.attempt_verify(share, now):
                    break
                if share.hash in self.heads:
                    bads.add(share.hash)
            else:
                if last is not None:
                    desired.add((self.shares[random.choice(list(self.reverse_shares[last]))].peer, last))
        for bad in bads:
            assert bad not in self.verified.shares
            assert bad in self.heads
            if p2pool.DEBUG:
                print "BAD", bad
            self.remove(bad)
        
        # try to get at least CHAIN_LENGTH height for each verified head, requesting parents if needed
        for head in list(self.verified.heads):
            head_height, last_hash = self.verified.get_height_and_last(head)
            last_height, last_last_hash = self.get_height_and_last(last_hash)
            # XXX review boundary conditions
            want = max(self.net.CHAIN_LENGTH - head_height, 0)
            can = max(last_height - 1 - self.net.CHAIN_LENGTH, 0) if last_last_hash is not None else last_height
            get = min(want, can)
            #print 'Z', head_height, last_hash is None, last_height, last_last_hash is None, want, can, get
            for share in itertools.islice(self.get_chain_known(last_hash), get):
                if not self.attempt_verify(share, now):
                    break
            if head_height < self.net.CHAIN_LENGTH and last_last_hash is not None:
                desired.add((self.verified.shares[random.choice(list(self.verified.reverse_shares[last_hash]))].peer, last_last_hash))
        
        # decide best tree
        best_tail = max(self.verified.tails, key=lambda h: self.score(max(self.verified.tails[h], key=self.verified.get_height), ht)) if self.verified.tails else None
        # decide best verified head
        scores = sorted(self.verified.tails.get(best_tail, []), key=lambda h: (
            self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
            ht.get_min_height(self.verified.shares[h].previous_block),
            self.verified.shares[h].peer is None,
            -self.verified.shares[h].time_seen
        ))
        
        
        if p2pool.DEBUG:
            print len(self.verified.tails.get(best_tail, [])), '\\/\\/\\/\\/\\/'
            if len(scores) > 10:
                print '    ...'
            for h in scores[-10:]:
                print '   ', format_hash(h), format_hash(self.verified.shares[h].previous_hash), (
                    self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
                    ht.get_min_height(self.verified.shares[h].previous_block),
                    self.verified.shares[h].peer is None,
                    -self.verified.shares[h].time_seen
                )
        
        # eat away at heads
        if scores:
            while True:
                to_remove = set()
                for share_hash, tail in self.heads.iteritems():
                    if share_hash in scores[-5:]:
                        #print 1
                        continue
                    if self.shares[share_hash].time_seen > time.time() - 30:
                        #print 2
                        continue
                    if share_hash not in self.verified.shares and max(self.shares[after_tail_hash].time_seen for after_tail_hash in self.reverse_shares.get(tail)) > time.time() - 120: # XXX stupid
                        #print 3
                        continue
                    to_remove.add(share_hash)
                if not to_remove:
                    break
                for share_hash in to_remove:
                    self.remove(share_hash)
                    if share_hash in self.verified.shares:
                        self.verified.remove(share_hash)
                #print "_________", to_remove
        
        # drop tails
        while True:
            to_remove = set()
            for tail, heads in self.tails.iteritems():
                if min(self.get_height(head) for head in heads) < 2*self.net.CHAIN_LENGTH + 10:
                    continue
                for aftertail in self.reverse_shares.get(tail, set()):
                    if len(self.reverse_shares[self.shares[aftertail].previous_hash]) > 1: # XXX
                        print "raw"
                        continue
                    to_remove.add(aftertail)
            if not to_remove:
                break
            # if removed from this, it must be removed from verified
            #start = time.time()
            for aftertail in to_remove:
                if self.shares[aftertail].previous_hash not in self.tails:
                    print "erk", aftertail, self.shares[aftertail].previous_hash
                    continue
                self.remove(aftertail)
                if aftertail in self.verified.shares:
                    self.verified.remove(aftertail)
            #end = time.time()
            #print "removed! %i %f" % (len(to_remove), (end - start)/len(to_remove))
        
        best = scores[-1] if scores else None
        
        if best is not None:
            best_share = self.verified.shares[best]
            if ht.get_min_height(best_share.header['previous_block']) < ht.get_min_height(previous_block) and best_share.bitcoin_hash != previous_block and best_share.peer is not None:
                if p2pool.DEBUG:
                    print 'Stale detected!'
                best = best_share.previous_hash
        
        return best, desired
    
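    # Scores a verified head as a tuple (verified chain length capped at
    # CHAIN_LENGTH, best work rate): walking up to CHAIN_LENGTH shares back
    # from a point roughly halfway down the chain, it divides the accumulated
    # expected work by how far those shares' previous blocks lag behind the
    # best known block height. Memoized briefly because think() calls it for
    # every verified tail.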
    @memoize.memoize_with_backing(expiring_dict.ExpiringDict(5, get_touches=False))
    def score(self, share_hash, ht):
        head_height, last = self.verified.get_height_and_last(share_hash)
        score2 = 0
        attempts = 0
        max_height = 0
        share2_hash = self.verified.get_nth_parent_hash(share_hash, min(self.net.CHAIN_LENGTH//2, head_height//2)) if last is not None else share_hash
        for share in reversed(list(itertools.islice(self.verified.get_chain_known(share2_hash), self.net.CHAIN_LENGTH))):
            max_height = max(max_height, ht.get_min_height(share.header['previous_block']))
            attempts += bitcoin_data.target_to_average_attempts(share.target)
            this_score = attempts//(ht.get_highest_height() - max_height + 1)
            if this_score > score2:
                score2 = this_score
        return min(head_height, self.net.CHAIN_LENGTH), score2

def format_hash(x):
    if x is None:
        return 'xxxxxxxx'
    return '%08x' % (x % 2**32)

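# Append-only on-disk log of shares and verified-share hashes. Each line is
# "<type_id> <hex data>": 0 for a share1a, 1 for a share1b (a share that was
# also a valid block), 2 for the hash of a share known to have verified.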
class ShareStore(object):
    def __init__(self, filename, net):
        self.filename = filename
        self.net = net
    
    def get_shares(self):
        open(self.filename, 'a').close() # make empty file if it doesn't exist
        
        with open(self.filename) as f:
            for line in f:
                try:
                    type_id_str, data_hex = line.strip().split(' ')
                    type_id = int(type_id_str)
                    if type_id == 0:
                        yield 'share', Share.from_share1a(share1a_type.unpack(data_hex.decode('hex')))
                    elif type_id == 1:
                        yield 'share', Share.from_share1b(share1b_type.unpack(data_hex.decode('hex')))
                    elif type_id == 2:
                        yield 'verified_hash', int(data_hex, 16)
                    else:
                        raise NotImplementedError("share type %i" % (type_id,))
                except Exception:
                    log.err(None, "Error while reading saved shares, continuing where left off:")
    
    def add_share(self, share):
        f = open(self.filename, 'a')
        if share.bitcoin_hash <= share.header['target']:
            type_id, data = 1, share1b_type.pack(share.as_share1b())
        else:
            type_id, data = 0, share1a_type.pack(share.as_share1a())
        f.write("%i %s\n" % (type_id, data.encode('hex')))
        f.close()
    
    def add_verified_hash(self, share_hash):
        f = open(self.filename, 'a')
        f.write("%i %x\n" % (2, share_hash))
        f.close()

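# Per-network p2pool parameters, layered on top of the corresponding bitcoin
# network definitions.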
class Mainnet(bitcoin_data.Mainnet):
    SHARE_PERIOD = 5 # seconds
    CHAIN_LENGTH = 24*60*60//5 # shares
    TARGET_LOOKBEHIND = 200 # shares
    SPREAD = 3 # blocks
    SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
    IDENTIFIER = 'fc70035c7a81bc6f'.decode('hex')
    PREFIX = '2472ef181efcd37b'.decode('hex')
    ADDRS_TABLE = 'addrs'
    P2P_PORT = 9333
    MAX_TARGET = 2**256//2**32 - 1
    PERSIST = True
    SHARESTORE_FILENAME = 'shares.dat'
    HEADERSTORE_FILENAME = 'headers.dat'

class Testnet(bitcoin_data.Testnet):
    SHARE_PERIOD = 1 # seconds
    CHAIN_LENGTH = 24*60*60//5 # shares
    TARGET_LOOKBEHIND = 200 # shares
    SPREAD = 3 # blocks
    SCRIPT = '410403ad3dee8ab3d8a9ce5dd2abfbe7364ccd9413df1d279bf1a207849310465b0956e5904b1155ecd17574778f9949589ebfd4fb33ce837c241474a225cf08d85dac'.decode('hex')
    IDENTIFIER = '5fc2be2d4f0d6bfb'.decode('hex')
    PREFIX = '3f6057a15036f441'.decode('hex')
    ADDRS_TABLE = 'addrs_testnet'
    P2P_PORT = 19333
    MAX_TARGET = 2**256//2**20 - 1
    PERSIST = False
    SHARESTORE_FILENAME = 'testnet_shares.dat'
    HEADERSTORE_FILENAME = 'testnet_headers.dat'