from __future__ import division

import os
import random
import time

from twisted.python import log

import p2pool
from p2pool import skiplists
from p2pool.bitcoin import data as bitcoin_data, script
from p2pool.util import math, forest, pack
share_data_type = pack.ComposedType([
    ('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
    ('coinbase', pack.VarStrType()),
    ('nonce', pack.VarStrType()),
    ('new_script', pack.VarStrType()),
    ('subsidy', pack.IntType(64)),
    ('donation', pack.IntType(16)),
    ('stale_info', pack.IntType(8)), # 0 nothing, 253 orphan, 254 doa
])
share_info_type = pack.ComposedType([
    ('share_data', share_data_type),
    ('bits', bitcoin_data.FloatingIntegerType()),
    ('timestamp', pack.IntType(32)),
])
share1a_type = pack.ComposedType([
    ('header', bitcoin_data.block_header_type),
    ('share_info', share_info_type),
    ('merkle_branch', bitcoin_data.merkle_branch_type),
])
share1b_type = pack.ComposedType([
    ('header', bitcoin_data.block_header_type),
    ('share_info', share_info_type),
    ('other_txs', pack.ListType(bitcoin_data.tx_type)),
])
share_type = pack.ComposedType([
    ('type', pack.VarIntType()), # 0: share1a, 1: share1b (see Share.from_share)
    ('contents', pack.VarStrType()),
])
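
# a minimal usage sketch (assuming `raw` is the packed bytes received from a
# peer and `net` is one of p2pool's network definitions):
#
#   share = Share.from_share(share_type.unpack(raw), net)
#   raw2 = share_type.pack(share.as_share())
#
# from_share checks that the advertised type tag matches the proof-of-work
# actually present: a share1b (type 1) must be a full block solution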

class Share(object):
    __slots__ = 'header previous_block share_info merkle_branch other_txs timestamp share_data new_script subsidy previous_hash previous_share_hash target nonce pow_hash header_hash hash time_seen peer donation net'.split(' ')
    
    @classmethod
    def from_share(cls, share, net):
        if share['type'] == 0:
            res = cls.from_share1a(share1a_type.unpack(share['contents']), net)
            if not (res.pow_hash > res.header['bits'].target):
                raise ValueError('invalid share type')
            return res
        elif share['type'] == 1:
            res = cls.from_share1b(share1b_type.unpack(share['contents']), net)
            if not (res.pow_hash <= res.header['bits'].target):
                raise ValueError('invalid share type')
            return res
        else:
            raise ValueError('unknown share type: %r' % (share['type'],))
    
    @classmethod
    def from_share1a(cls, share1a, net):
        return cls(net, **share1a)
    
    @classmethod
    def from_share1b(cls, share1b, net):
        # the leaf at index 0 (the generation tx) doesn't appear in its own branch, so a 0 placeholder suffices
        return cls(net, merkle_branch=bitcoin_data.calculate_merkle_branch([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in share1b['other_txs']], 0), **share1b)
    
    def __init__(self, net, header, share_info, merkle_branch, other_txs=None):
        self.net = net
        
        if p2pool.DEBUG and other_txs is not None and bitcoin_data.calculate_merkle_branch([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in other_txs], 0) != merkle_branch:
            raise ValueError('merkle_branch and other_txs do not match')
        
        if len(merkle_branch) > 16:
            raise ValueError('merkle_branch too long!')
        
        self.header = header
        self.previous_block = header['previous_block']
        self.share_info = share_info
        self.merkle_branch = merkle_branch
        
        self.share_data = self.share_info['share_data']
        self.target = self.share_info['bits'].target
        self.timestamp = self.share_info['timestamp']
        
        self.new_script = self.share_data['new_script']
        self.subsidy = self.share_data['subsidy']
        self.donation = self.share_data['donation']
        
        if len(self.new_script) > 100:
            raise ValueError('new_script too long!')
        
        self.previous_hash = self.previous_share_hash = self.share_data['previous_share_hash']
        self.nonce = self.share_data['nonce']
        
        if len(self.nonce) > 100:
            raise ValueError('nonce too long!')
        
        if len(self.share_data['coinbase']) > 100:
            raise ValueError('coinbase too large! %i bytes' % (len(self.share_data['coinbase']),))
        
        self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
        self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
        
        self.hash = bitcoin_data.hash256(share1a_type.pack(self.as_share1a()))
        
        if self.pow_hash > self.target:
            print 'hash %x' % self.pow_hash
            print 'targ %x' % self.target
            raise ValueError('not enough work!')
        
        if other_txs is not None and not self.pow_hash <= self.header['bits'].target:
            raise ValueError('other_txs provided when not a block solution')
        if other_txs is None and self.pow_hash <= self.header['bits'].target:
            raise ValueError('other_txs not provided when a block solution')
        
        self.other_txs = other_txs
        
        self.time_seen = time.time()
        self.peer = None
    
    def __repr__(self):
        return '<Share %s>' % (' '.join('%s=%r' % (k, getattr(self, k)) for k in self.__slots__),)
    
    def check(self, tracker):
        if script.get_sigop_count(self.new_script) > 1:
            raise ValueError('too many sigops!')
        
        share_info, gentx = generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.net)
        if share_info != self.share_info:
            raise ValueError('share difficulty invalid')
        
        if bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(gentx)), 0, self.merkle_branch) != self.header['merkle_root']:
            raise ValueError('''gentx doesn't match header via merkle_branch''')
    
    def as_share(self):
        if self.pow_hash > self.header['bits'].target: # share1a
            return dict(type=0, contents=share1a_type.pack(self.as_share1a()))
        elif self.pow_hash <= self.header['bits'].target: # share1b
            return dict(type=1, contents=share1b_type.pack(self.as_share1b()))
        else:
            raise AssertionError()
    
    def as_share1a(self):
        return dict(header=self.header, share_info=self.share_info, merkle_branch=self.merkle_branch)
    
    def as_share1b(self):
        if self.other_txs is None:
            raise ValueError('share does not contain all txs')
        
        return dict(header=self.header, share_info=self.share_info, other_txs=self.other_txs)
    
    def as_block(self, tracker):
        if self.other_txs is None:
            raise ValueError('share does not contain all txs')
        
        share_info, gentx = generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.net)
        assert share_info == self.share_info
        
        return dict(header=self.header, txs=[gentx] + self.other_txs)

def get_pool_attempts_per_second(tracker, previous_share_hash, dist):
    assert dist >= 2 # far must be a strict ancestor of near for the timestamps to differ
    near = tracker.shares[previous_share_hash]
    far = tracker.shares[tracker.get_nth_parent_hash(previous_share_hash, dist - 1)]
    attempts = tracker.get_work(near.hash) - tracker.get_work(far.hash)
    time = near.timestamp - far.timestamp
    if time <= 0:
        time = 1 # guard against zero or negative elapsed time between shares
    return attempts//time
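
# illustrative numbers (not from the source): if the last dist shares span 600
# seconds of timestamps and represent 2**40 expected hashes of work, this
# reports 2**40//600, i.e. roughly 1.8 Ghash/s for the whole pool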

def get_average_stale_prop(tracker, share_hash, lookbehind):
    stales = sum(1 for share in tracker.get_chain(share_hash, lookbehind) if share.share_data['stale_info'] in [253, 254])
    return stales/(lookbehind + stales)
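
# each chain share whose stale_info is 253/254 announces one orphaned/dead
# share by its creator that never made the chain, so total production is
# roughly lookbehind + stales; e.g. 3 announces over lookbehind=17 chain
# shares gives 3/(17 + 3) = 15%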

def generate_transaction(tracker, share_data, block_target, desired_timestamp, net):
    previous_share_hash = share_data['previous_share_hash']
    new_script = share_data['new_script']
    subsidy = share_data['subsidy']
    donation = share_data['donation']
    assert 0 <= donation <= 65535
    
    if len(share_data['coinbase']) > 100:
        raise ValueError('coinbase too long!')
    
    previous_share = tracker.shares[previous_share_hash] if previous_share_hash is not None else None
    
    chain_length = getattr(net, 'REAL_CHAIN_LENGTH_FUNC', lambda _: net.REAL_CHAIN_LENGTH)(previous_share.timestamp if previous_share is not None else None)
    
    height, last = tracker.get_height_and_last(previous_share_hash)
    assert height >= chain_length or last is None
    if height < net.TARGET_LOOKBEHIND:
        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(net.MAX_TARGET)
    else:
        attempts_per_second = get_pool_attempts_per_second(tracker, previous_share_hash, net.TARGET_LOOKBEHIND)
        pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 # target that yields one share per SHARE_PERIOD at the current pool rate
        pre_target2 = math.clip(pre_target, (previous_share.target*9//10, previous_share.target*11//10)) # limit adjustment to +/-10% per share
        pre_target3 = math.clip(pre_target2, (0, net.MAX_TARGET))
        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
    
    attempts_to_block = bitcoin_data.target_to_average_attempts(block_target)
    max_att = net.SPREAD * attempts_to_block
    
    # weights are expected attempts scaled by 65535, the full range of the 16-bit donation proportion
    this_att = min(bitcoin_data.target_to_average_attempts(bits.target), max_att)
    other_weights, other_total_weight, other_donation_weight = tracker.get_cumulative_weights(previous_share_hash, min(height, chain_length), 65535*max(0, max_att - this_att))
    assert other_total_weight == sum(other_weights.itervalues()) + other_donation_weight, (other_total_weight, sum(other_weights.itervalues()) + other_donation_weight)
    weights, total_weight, donation_weight = math.add_dicts({new_script: this_att*(65535-donation)}, other_weights), this_att*65535 + other_total_weight, this_att*donation + other_donation_weight
    assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
    
    SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
    
    # 1 satoshi is always donated so that a list of p2pool generated blocks can be easily found by looking at the donation address
    amounts = dict((script, (subsidy-1)*(199*weight)//(200*total_weight)) for (script, weight) in weights.iteritems())
    amounts[new_script] = amounts.get(new_script, 0) + (subsidy-1)//200 # 0.5% bonus to this share's solver
    amounts[SCRIPT] = amounts.get(SCRIPT, 0) + (subsidy-1)*(199*donation_weight)//(200*total_weight)
    amounts[SCRIPT] = amounts.get(SCRIPT, 0) + subsidy - sum(amounts.itervalues()) # collect any extra satoshis :P
    
    if sum(amounts.itervalues()) != subsidy:
        raise ValueError() # amounts must add up to exactly the subsidy
    if any(x < 0 for x in amounts.itervalues()):
        raise ValueError() # no output may be negative
    
    dests = sorted(amounts.iterkeys(), key=lambda script: (amounts[script], script))
    dests = dests[-4000:] # block length limit, unlikely to ever be hit
    
    share_info = dict(
        share_data=share_data,
        bits=bits,
        timestamp=math.clip(desired_timestamp, (previous_share.timestamp - 60, previous_share.timestamp + 60)) if previous_share is not None else desired_timestamp,
    )
    
    return share_info, dict(
        version=1,
        tx_ins=[dict(
            previous_output=None,
            sequence=None,
            script=share_data['coinbase'].ljust(2, '\x00'), # coinbase scripts must be at least 2 bytes
        )],
        # the first (zero-value) output commits to the hash of share_info
        tx_outs=[dict(value=0, script='\x20' + pack.IntType(256).pack(bitcoin_data.hash256(share_info_type.pack(share_info))))] + [dict(value=amounts[script], script=script) for script in dests if amounts[script]],
        lock_time=0,
    )
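
# worked example (illustrative): a lone miner with donation field 0 and
# subsidy 5000000000 satoshi (50 BTC) receives
# (subsidy-1)*199//200 + (subsidy-1)//200 = 4999999998 satoshi, and the
# 2 satoshi of rounding dust falls through to the donation SCRIPT, so every
# p2pool block pays the donation address at least 1 satoshi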

class OkayTracker(forest.Tracker):
    def __init__(self, net, my_share_hashes, my_doa_share_hashes):
        forest.Tracker.__init__(self)
        self.net = net
        self.verified = forest.Tracker(delta_type=forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
            my_count=lambda share: 1 if share.hash in my_share_hashes else 0,
            my_doa_count=lambda share: 1 if share.hash in my_doa_share_hashes else 0,
            my_orphan_announce_count=lambda share: 1 if share.hash in my_share_hashes and share.share_data['stale_info'] == 253 else 0,
            my_dead_announce_count=lambda share: 1 if share.hash in my_share_hashes and share.share_data['stale_info'] == 254 else 0,
        )))
        self.verified.get_nth_parent_hash = self.get_nth_parent_hash # self is a superset of self.verified
        
        self.get_cumulative_weights = skiplists.WeightsSkipList(self)
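    
    # attempt_verify runs the expensive Share.check() once per share, caching
    # success by adding the share to the verified sub-tracker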
    def attempt_verify(self, share):
        if share.hash in self.verified.shares:
            return True
        height, last = self.get_height_and_last(share.hash)
        if height < self.net.CHAIN_LENGTH + 1 and last is not None:
            raise AssertionError()
        try:
            share.check(self)
        except:
            log.err(None, 'Share check failed:')
            return False
        else:
            self.verified.add(share)
            return True
    
    def think(self, ht, previous_block):
        desired = set()
        bads = set()
        
        # make 'unverified heads' set?
        # for each overall head, attempt verification
        # if it fails, attempt on parent, and repeat
        # if no successful verification because of lack of parents, request parent
        for head in set(self.heads) - set(self.verified.heads):
            head_height, last = self.get_height_and_last(head)
            
            for share in self.get_chain(head, head_height if last is None else min(5, max(0, head_height - self.net.CHAIN_LENGTH))):
                if self.attempt_verify(share):
                    break
                if share.hash in self.heads:
                    bads.add(share.hash)
            else:
                if last is not None:
                    desired.add((self.shares[random.choice(list(self.reverse_shares[last]))].peer, last))
        for bad in bads:
            assert bad not in self.verified.shares
            assert bad in self.heads
            self.remove(bad)
        
        # try to get at least CHAIN_LENGTH height for each verified head, requesting parents if needed
        for head in list(self.verified.heads):
            head_height, last_hash = self.verified.get_height_and_last(head)
            last_height, last_last_hash = self.get_height_and_last(last_hash)
            # XXX review boundary conditions
            want = max(self.net.CHAIN_LENGTH - head_height, 0)
            can = max(last_height - 1 - self.net.CHAIN_LENGTH, 0) if last_last_hash is not None else last_height
            get = min(want, can)
            #print 'Z', head_height, last_hash is None, last_height, last_last_hash is None, want, can, get
            for share in self.get_chain(last_hash, get):
                if not self.attempt_verify(share):
                    break
            if head_height < self.net.CHAIN_LENGTH and last_last_hash is not None:
                desired.add((self.verified.shares[random.choice(list(self.verified.reverse_shares[last_hash]))].peer, last_last_hash))
        if p2pool.DEBUG:
            print len(self.verified.tails), "tails:"
            for x in self.verified.tails:
                print format_hash(x), self.score(max(self.verified.tails[x], key=self.verified.get_height), ht)
        
        # decide the best tail (tree root) by score
        best_tail = max(self.verified.tails, key=lambda h: self.score(max(self.verified.tails[h], key=self.verified.get_height), ht)) if self.verified.tails else None
        # decide best verified head
        scores = sorted(self.verified.tails.get(best_tail, []), key=lambda h: (
            self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
            #self.verified.shares[h].peer is None,
            0 if self.verified.shares[h].peer is None else ht.get_height_rel_highest(self.verified.shares[h].previous_block),
            -self.verified.shares[h].time_seen
        ))
        
        if p2pool.DEBUG:
            print len(self.verified.tails), "chain tails and", len(self.verified.tails.get(best_tail, [])), 'chain heads. Top 10 heads:'
            for h in scores[-10:]:
                print ' ', format_hash(h), format_hash(self.verified.shares[h].previous_hash), (
                    self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
                    self.verified.shares[h].peer is None,
                    0 if self.verified.shares[h].peer is None else ht.get_height_rel_highest(self.verified.shares[h].previous_block),
                    -self.verified.shares[h].time_seen
                )
        
        # drop unwanted heads, a bounded number of passes at a time
        for i in xrange(1000):
            to_remove = set()
            for share_hash, tail in self.heads.iteritems():
                if share_hash in scores[-5:]:
                    continue
                if self.shares[share_hash].time_seen > time.time() - 300:
                    continue
                if share_hash not in self.verified.shares and max(self.shares[after_tail_hash].time_seen for after_tail_hash in self.reverse_shares.get(tail)) > time.time() - 120: # XXX stupid
                    continue
                to_remove.add(share_hash)
            if not to_remove:
                break
            for share_hash in to_remove:
                self.remove(share_hash)
                if share_hash in self.verified.shares:
                    self.verified.remove(share_hash)
            #print "_________", to_remove
        
        # drop tails that are no longer needed, a bounded number of passes at a time
        for i in xrange(1000):
            to_remove = set()
            for tail, heads in self.tails.iteritems():
                if min(self.get_height(head) for head in heads) < 2*self.net.CHAIN_LENGTH + 10:
                    continue
                for aftertail in self.reverse_shares.get(tail, set()):
                    if len(self.reverse_shares[self.shares[aftertail].previous_hash]) > 1: # XXX
                        continue
                    to_remove.add(aftertail)
            if not to_remove:
                break
            
            # if removed from this, it must be removed from verified
            for aftertail in to_remove:
                if self.shares[aftertail].previous_hash not in self.tails:
                    print "erk", aftertail, self.shares[aftertail].previous_hash
                    continue
                self.remove(aftertail)
                if aftertail in self.verified.shares:
                    self.verified.remove(aftertail)
            #print "removed! %i %f" % (len(to_remove), (end - start)/len(to_remove))
        
        best = scores[-1] if scores else None
        
        if best is not None:
            best_share = self.verified.shares[best]
            if ht.get_height_rel_highest(best_share.header['previous_block']) < ht.get_height_rel_highest(previous_block) and best_share.header_hash != previous_block and best_share.peer is not None:
                # the best share is mining on top of a block that is behind ours; back off to its parent
                if p2pool.DEBUG:
                    print 'Stale detected! %x < %x' % (best_share.header['previous_block'], previous_block)
                best = best_share.previous_hash
        
        return best, desired
    
    def score(self, share_hash, ht):
        head_height = self.verified.get_height(share_hash)
        if head_height < self.net.CHAIN_LENGTH:
            return head_height, None
        
        end_point = self.verified.get_nth_parent_hash(share_hash, self.net.CHAIN_LENGTH*15//16)
        
        block_height = max(ht.get_height_rel_highest(share.header['previous_block']) for share in
            self.verified.get_chain(end_point, self.net.CHAIN_LENGTH//16))
        
        return self.net.CHAIN_LENGTH, (self.verified.get_work(share_hash) - self.verified.get_work(end_point))//(0 - block_height + 1)
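
# note on score(): chains shorter than CHAIN_LENGTH sort purely by height;
# full-length chains compare by the work accumulated over their most recent
# 15/16, penalized by how many blocks the chain has fallen behind the network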

def format_hash(x):
    if x is None:
        return 'xxxxxxxx'
    return '%08x' % (x % 2**32)

class ShareStore(object):
    def __init__(self, prefix, net):
        self.dirname = os.path.dirname(os.path.abspath(prefix))
        self.filename = os.path.basename(os.path.abspath(prefix))
        self.net = net
        self.known = None # will be filename -> set of share hashes, set of verified hashes
        self.known_desired = None
    
    def get_shares(self):
        if self.known is not None:
            raise AssertionError()
        known = {}
        filenames, next = self.get_filenames_and_next()
        for filename in filenames:
            share_hashes, verified_hashes = known.setdefault(filename, (set(), set()))
            with open(filename, 'rb') as f:
                for line in f:
                    try:
                        type_id_str, data_hex = line.strip().split(' ')
                        type_id = int(type_id_str)
                        if type_id == 2: # written by add_verified_hash
                            verified_hash = int(data_hex, 16)
                            yield 'verified_hash', verified_hash
                            verified_hashes.add(verified_hash)
                        elif type_id == 5: # written by add_share
                            share = Share.from_share(share_type.unpack(data_hex.decode('hex')), self.net)
                            yield 'share', share
                            share_hashes.add(share.hash)
                        else:
                            raise NotImplementedError("share type %i" % (type_id,))
                    except Exception:
                        log.err(None, "Error while reading saved shares, continuing where left off:")
        self.known = known
        self.known_desired = dict((k, (set(a), set(b))) for k, (a, b) in known.iteritems())
    
    def _add_line(self, line):
        filenames, next = self.get_filenames_and_next()
        if filenames and os.path.getsize(filenames[-1]) < 10e6:
            filename = filenames[-1]
        else:
            filename = next # start a new file once the current one reaches ~10 MB
        
        with open(filename, 'ab') as f:
            f.write(line + '\n')
        
        return filename
    
    def add_share(self, share):
        for filename, (share_hashes, verified_hashes) in self.known.iteritems():
            if share.hash in share_hashes:
                break
        else:
            filename = self._add_line("%i %s" % (5, share_type.pack(share.as_share()).encode('hex')))
            share_hashes, verified_hashes = self.known.setdefault(filename, (set(), set()))
        share_hashes.add(share.hash)
        share_hashes, verified_hashes = self.known_desired.setdefault(filename, (set(), set()))
        share_hashes.add(share.hash)
    
    def add_verified_hash(self, share_hash):
        for filename, (share_hashes, verified_hashes) in self.known.iteritems():
            if share_hash in verified_hashes:
                break
        else:
            filename = self._add_line("%i %x" % (2, share_hash))
            share_hashes, verified_hashes = self.known.setdefault(filename, (set(), set()))
        verified_hashes.add(share_hash)
        share_hashes, verified_hashes = self.known_desired.setdefault(filename, (set(), set()))
        verified_hashes.add(share_hash)
    
    def get_filenames_and_next(self):
        suffixes = sorted(int(x[len(self.filename):]) for x in os.listdir(self.dirname) if x.startswith(self.filename) and x[len(self.filename):].isdigit())
        return [os.path.join(self.dirname, self.filename + str(suffix)) for suffix in suffixes], os.path.join(self.dirname, self.filename + (str(suffixes[-1] + 1) if suffixes else str(0)))
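    
    # e.g. with prefix '/data/p2pool/shares.' (hypothetical path) the store
    # reads and writes shares.0, shares.1, ... in /data/p2pool, and `next`
    # is the lowest unused suffix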
    
    def forget_share(self, share_hash):
        for filename, (share_hashes, verified_hashes) in self.known_desired.iteritems():
            if share_hash in share_hashes:
                share_hashes.remove(share_hash)
        self.check_remove()
    
    def forget_verified_share(self, share_hash):
        for filename, (share_hashes, verified_hashes) in self.known_desired.iteritems():
            if share_hash in verified_hashes:
                verified_hashes.remove(share_hash)
        self.check_remove()
    
    def check_remove(self):
        to_remove = set()
        for filename, (share_hashes, verified_hashes) in self.known_desired.iteritems():
            #print filename, len(share_hashes) + len(verified_hashes)
            if not share_hashes and not verified_hashes:
                to_remove.add(filename)
        for filename in to_remove:
            self.known.pop(filename)
            self.known_desired.pop(filename)
            os.remove(filename)
            print "REMOVED", filename
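
# a minimal usage sketch (hypothetical path; ShareStore is normally driven by
# p2pool's main loop):
#
#   store = ShareStore('/data/p2pool/shares.', net)
#   for kind, value in store.get_shares():
#       pass # kind is 'share' or 'verified_hash'
#   store.add_share(some_share)
#   store.add_verified_hash(some_share.hash)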