1 from __future__ import division
9 from p2pool.util import bases, math
# Presumably raised when packed data ends before a full value could be read
# (see the length check in read()) -- class body elided from this chunk.
class EarlyEnd(Exception):
# Counterpart of EarlyEnd; presumably raised when packed data continues past
# the expected end -- class body elided from this chunk.
class LateEnd(Exception):
# Read `length` bytes from a (data, pos) cursor pair, returning the bytes and
# an advanced cursor. (Python 2 tuple-parameter syntax.)
def read((data, pos), length):
    data2 = data[pos:pos + length]
    if len(data2) != length:
        # NOTE(review): the statement under this check is elided from this
        # chunk -- presumably `raise EarlyEnd()`; confirm against full file.
    return data2, (data, pos + length)
24 # the same data can have only one unpacked representation, but multiple packed binary representations
27 # return hash(tuple(self.__dict__.items()))
29 #def __eq__(self, other):
30 # if not isinstance(other, Type):
31 # raise NotImplementedError()
32 # return self.__dict__ == other.__dict__
    # NOTE(review): the methods below are fragmentary in this chunk -- the
    # enclosing class header and several interior lines are elided.

    def _unpack(self, data):
        # Decode a complete byte string: start read() at position 0.
        # (remainder of this method is elided; presumably it verifies the
        # entire input was consumed -- confirm against full file)
        obj, (data2, pos) = self.read((data, 0))

        # (fragment of _pack) serialize obj; write() threads an accumulator
        # through, starting from None.
        f = self.write(None, obj)

    def unpack(self, data):
        # Public decode wrapper around _unpack.
        obj = self._unpack(data)
        # Round-trip sanity check: re-packing must decode to the same object.
        data2 = self._pack(obj)
        assert self._unpack(data2) == obj

        # (fragment of pack) pack then verify the round trip.
        data = self._pack(obj)
        assert self._unpack(data) == obj
73 def pack_base58(self, obj):
74 return base58.base58_encode(self.pack(obj))
76 def unpack_base58(self, base58_data):
77 return self.unpack(base58.base58_decode(base58_data))
80 def hash160(self, obj):
81 return ShortHashType().unpack(hashlib.new('ripemd160', hashlib.sha256(self.pack(obj)).digest()).digest())
83 def hash256(self, obj):
84 return HashType().unpack(hashlib.sha256(hashlib.sha256(self.pack(obj)).digest()).digest())
class VarIntType(Type):
    # Bitcoin CompactSize ("varint") encoding.
    # redundancy doesn't matter here because bitcoin and p2pool both reencode before hashing
    # NOTE(review): several lines of this class (including the `def read`
    # header and the first-byte dispatch) are elided from this chunk.
        # (read fragment) first byte selects the width of the value that follows
        data, file = read(file, 1)
            desc, length = '<H', 2  # 0xfd marker: 2-byte little-endian
            desc, length = '<I', 4  # 0xfe marker: 4-byte little-endian
            desc, length = '<Q', 8  # 0xff marker: 8-byte little-endian
            raise AssertionError()
        data, file = read(file, length)
        return struct.unpack(desc, data)[0], file

    def write(self, file, item):
        # Emit the shortest encoding whose range covers `item`; `file` is a
        # linked chain of (previous_state, bytes) pairs.
        # NOTE(review): the `if`/first `elif`/`else` headers are elided here.
            file = file, struct.pack('<B', item)
            file = file, struct.pack('<BH', 0xfd, item)
        elif item <= 0xffffffff:
            file = file, struct.pack('<BI', 0xfe, item)
        elif item <= 0xffffffffffffffff:
            file = file, struct.pack('<BQ', 0xff, item)
            raise ValueError('int too large for varint')
class VarStrType(Type):
    """Variable-length string: a varint length prefix followed by the bytes."""

    _inner_size = VarIntType()

    def read(self, file):
        # Length prefix first, then exactly that many bytes.
        size, file = self._inner_size.read(file)
        return read(file, size)

    def write(self, file, item):
        # Emit the length prefix, then chain the raw bytes after it.
        file = self._inner_size.write(file, len(item))
        return file, item
class FixedStrType(Type):
    # Fixed-width byte string of exactly `length` bytes.
    # NOTE(review): the `self.length = length` assignment in __init__ and the
    # trailing `return file, item` in write are elided from this chunk.
    def __init__(self, length):

    def read(self, file):
        # Consume exactly self.length bytes.
        return read(file, self.length)

    def write(self, file, item):
        if len(item) != self.length:
            raise ValueError('incorrect length item!')
class EnumType(Type):
    # Maps symbolic keys to wire values encoded with an inner type.
    # NOTE(review): the assignments of self.inner/self.values and the
    # construction of the inverse self.keys mapping are elided from this chunk.
    def __init__(self, inner, values):
        for k, v in values.iteritems():
            # NOTE(review): the duplicate-detection condition guarding this
            # raise is elided.
            raise ValueError('duplicate value in values')

    def read(self, file):
        # Read the wire value and translate it back to its symbolic key.
        data, file = self.inner.read(file)
        return self.keys[data], file

    def write(self, file, item):
        # Translate the key to its wire value and delegate to the inner type.
        return self.inner.write(file, self.values[item])
class HashType(Type):
    """256-bit hash: little-endian bytes on the wire, exposed as an int."""

    def read(self, file):
        # 32 bytes, byte-reversed to big-endian before hex conversion.
        raw, file = read(file, 256//8)
        value = int(raw[::-1].encode('hex'), 16)
        return value, file

    def write(self, file, item):
        if not 0 <= item < 2**256:
            raise ValueError('invalid hash value - %r' % (item,))
        if item != 0 and item < 2**160:
            # A nonzero value this small suggests a 160-bit hash was intended.
            warnings.warn('very low hash value - maybe you meant to use ShortHashType? %x' % (item,))
        serialized = ('%064x' % (item,)).decode('hex')[::-1]
        return file, serialized
class ShortHashType(Type):
    """160-bit hash (RIPEMD-160 size): little-endian on the wire, as an int."""

    def read(self, file):
        raw, file = read(file, 160//8)
        value = int(raw[::-1].encode('hex'), 16)
        return value, file

    def write(self, file, item):
        if not 0 <= item < 2**160:
            raise ValueError('invalid hash value - %r' % (item,))
        serialized = ('%040x' % (item,)).decode('hex')[::-1]
        return file, serialized
class ListType(Type):
    # Homogeneous list: a varint element count followed by each element
    # encoded with self.type.
    # NOTE(review): the `self.type = type` assignment, result accumulation in
    # read, the write loop header, and both returns are elided from this chunk.
    _inner_size = VarIntType()

    def __init__(self, type):

    def read(self, file):
        length, file = self._inner_size.read(file)
        for i in xrange(length):
            item, file = self.type.read(file)

    def write(self, file, item):
        # Count prefix, then each element in order, threading the file state.
        file = self._inner_size.write(file, len(item))
            file = self.type.write(file, subitem)
class StructType(Type):
    # Single fixed-size value encoded via a struct format string `desc`.
    # NOTE(review): the `self.desc = desc` assignment and both returns are
    # elided from this chunk.
    def __init__(self, desc):
        self.length = struct.calcsize(self.desc)

    def read(self, file):
        data, file = read(file, self.length)
        res, = struct.unpack(self.desc, data)

    def write(self, file, item):
        data = struct.pack(self.desc, item)
        if struct.unpack(self.desc, data)[0] != item:
            # special test because struct doesn't error on some overflows
            raise ValueError('''item didn't survive pack cycle (%r)''' % (item,))
class IPV6AddressType(Type):
    # 16-byte address field; only IPv4-mapped addresses (::ffff:a.b.c.d) are
    # handled, exposed to callers as a dotted-quad string.
    def read(self, file):
        data, file = read(file, 16)
        if data[:12] != '00000000000000000000ffff'.decode('hex'):
            raise ValueError('ipv6 addresses not supported yet')
        return '.'.join(str(ord(x)) for x in data[12:]), file

    def write(self, file, item):
        bits = map(int, item.split('.'))
        # NOTE(review): the validity check guarding this raise, and the final
        # return, are elided from this chunk.
            raise ValueError('invalid address: %r' % (bits,))
        # IPv4-mapped prefix plus the four octets.
        data = '00000000000000000000ffff'.decode('hex') + ''.join(chr(x) for x in bits)
        assert len(data) == 16, len(data)
class ComposedType(Type):
    # Ordered sequence of (name, type) fields, decoded into a dict.
    # NOTE(review): the `self.fields = fields` assignment, the `item = {}`
    # initialization, and both returns are elided from this chunk.
    def __init__(self, fields):

    def read(self, file):
        # Decode each field in declaration order, threading the file state.
        for key, type_ in self.fields:
            item[key], file = type_.read(file)

    def write(self, file, item):
        # Write each field in declaration order, threading the file state.
        for key, type_ in self.fields:
            file = type_.write(file, item[key])
class ChecksummedType(Type):
    # Wraps an inner type with a 4-byte double-SHA256 checksum suffix
    # (Base58Check style).
    # NOTE(review): the `self.inner = inner` assignment and read's final
    # return are elided from this chunk.
    def __init__(self, inner):

    def read(self, file):
        obj, file = self.inner.read(file)
        # Re-pack to recover the exact bytes the checksum covers.
        data = self.inner.pack(obj)

        checksum, file = read(file, 4)
        if checksum != hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]:
            raise ValueError('invalid checksum')

    def write(self, file, item):
        data = self.inner.pack(item)
        # Payload followed by the first 4 bytes of its double-SHA256.
        return (file, data), hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]
class FloatingIntegerType(Type):
    # Bitcoin compact "bits" representation of a 256-bit target: a 4-byte
    # encoding with an exponent byte and a 3-byte mantissa.
    # redundancy doesn't matter here because bitcoin checks binary bits against its own computed bits
    # so it will always be encoded 'normally' in blocks (the way bitcoin does it)
    # NOTE(review): several lines (returns, loop/branch bodies) are elided
    # from this chunk.
    _inner = StructType('<I')

    def read(self, file):
        bits, file = self._inner.read(file)
        target = self._bits_to_target(bits)
        # Reject encodings that do not round-trip (non-canonical form).
        if self._target_to_bits(target) != bits:
            raise ValueError('bits in non-canonical form')

    def write(self, file, item):
        return self._inner.write(file, self._target_to_bits(item))

    def truncate_to(self, x):
        # Round-trip through the compact form, discarding precision beyond
        # the 3 mantissa bytes.
        return self._bits_to_target(self._target_to_bits(x, _check=False))

    def _bits_to_target(self, bits2):
        # mantissa (low 24 bits) shifted left by 8*(exponent - 3) bits.
        target = math.shift_left(bits2 & 0x00ffffff, 8 * ((bits2 >> 24) - 3))
        # Cross-check against the string-based decoder and the inverse encoder.
        assert target == self._bits_to_target1(struct.pack('<I', bits2))
        assert self._target_to_bits(target, _check=False) == bits2

    def _bits_to_target1(self, bits):
        # Reference decoder operating on the packed 4-byte form.
        length = ord(bits[0])
        return bases.string_to_natural((bits[1:] + '\0'*length)[:length])

    def _target_to_bits(self, target, _check=True):
        n = bases.natural_to_string(target)
        if n and ord(n[0]) >= 128:
            # NOTE(review): the statement under this high-bit check is elided
            # -- presumably it prepends a zero byte; confirm against full file.
        bits2 = (chr(len(n)) + (n + 3*chr(0))[:3])[::-1]
        bits = struct.unpack('<I', bits2)[0]

        if self._bits_to_target(bits) != target:
            raise ValueError(repr((target, self._bits_to_target(bits, _check=False))))
class PossiblyNone(Type):
    # Wraps an inner type, mapping one reserved wire value to/from None.
    def __init__(self, none_value, inner):
        self.none_value = none_value
        # NOTE(review): the `self.inner = inner` assignment is elided from
        # this chunk; read/write below depend on self.inner.

    def read(self, file):
        value, file = self.inner.read(file)
        return None if value == self.none_value else value, file

    def write(self, file, item):
        # The reserved value may not be written explicitly -- it would be
        # indistinguishable from None on read.
        if item == self.none_value:
            raise ValueError('none_value used')
        return self.inner.write(file, self.none_value if item is None else item)
# Network address as used in bitcoin p2p messages (services + IP + port).
# NOTE(review): the closing bracket of this definition is elided.
address_type = ComposedType([
    ('services', StructType('<Q')),
    ('address', IPV6AddressType()),
    ('port', StructType('>H')),  # port is big-endian, unlike the other ints
# Bitcoin transaction serialization.
# NOTE(review): several closing brackets are elided from this chunk.
tx_type = ComposedType([
    ('version', StructType('<I')),
    ('tx_ins', ListType(ComposedType([
        # previous_output of (hash=0, index=0xffffffff) is surfaced as None
        # (via PossiblyNone).
        ('previous_output', PossiblyNone(dict(hash=0, index=2**32 - 1), ComposedType([
            ('hash', HashType()),
            ('index', StructType('<I')),
        ('script', VarStrType()),
        # sequence of 0xffffffff is surfaced as None.
        ('sequence', PossiblyNone(2**32 - 1, StructType('<I'))),
    ('tx_outs', ListType(ComposedType([
        ('value', StructType('<Q')),  # output amount in base units
        ('script', VarStrType()),
    ('lock_time', StructType('<I')),
# Bitcoin block header.
# NOTE(review): the closing bracket of this definition is elided.
block_header_type = ComposedType([
    ('version', StructType('<I')),
    # previous_block of 0 is surfaced as None (via PossiblyNone).
    ('previous_block', PossiblyNone(0, HashType())),
    ('merkle_root', HashType()),
    ('timestamp', StructType('<I')),
    ('target', FloatingIntegerType()),  # compact "bits" encoding
    ('nonce', StructType('<I')),
# Full block: header plus transaction list.
# NOTE(review): the closing bracket of this definition is elided.
block_type = ComposedType([
    ('header', block_header_type),
    ('txs', ListType(tx_type)),
# Interior node of a merkle tree: a pair of child hashes.
# NOTE(review): the closing bracket of this definition is elided.
merkle_record_type = ComposedType([
    ('left', HashType()),
    ('right', HashType()),
def merkle_hash(tx_list):
    # Compute the merkle root of a transaction list by repeatedly hashing
    # adjacent pairs; an odd trailing element pairs with itself (right=None
    # is replaced by left).
    # NOTE(review): the final return (presumably hash_list[0]) is elided.
    hash_list = map(tx_type.hash256, tx_list)
    while len(hash_list) > 1:
        hash_list = [merkle_record_type.hash256(dict(left=left, right=left if right is None else right))
            for left, right in zip(hash_list[::2], hash_list[1::2] + [None])]
def target_to_average_attempts(target):
    """Expected number of hash attempts needed to meet `target`.

    Of the 2**256 equally likely hash values, target + 1 are acceptable.
    """
    hash_space = 2**256
    return hash_space // (target + 1)
# Base58Check address payload: version byte + 160-bit pubkey hash, wrapped
# with the 4-byte double-SHA256 checksum that ChecksummedType appends.
# NOTE(review): the closing brackets of this definition are elided.
human_address_type = ChecksummedType(ComposedType([
    ('version', StructType('<B')),
    ('pubkey_hash', ShortHashType()),
# Raw 65-byte public key -- presumably the uncompressed SEC encoding; confirm.
pubkey_type = FixedStrType(65)
def pubkey_hash_to_address(pubkey_hash, net):
    """Render a 160-bit pubkey hash as a base58 address for network `net`."""
    payload = dict(version=net.BITCOIN_ADDRESS_VERSION, pubkey_hash=pubkey_hash)
    return human_address_type.pack_base58(payload)
def pubkey_to_address(pubkey, net):
    """Derive the base58 address of a raw public key on network `net`."""
    pubkey_hash = pubkey_type.hash160(pubkey)
    return pubkey_hash_to_address(pubkey_hash, net)
def address_to_pubkey_hash(address, net):
    """Decode a base58 address into its pubkey hash, checking the network.

    Raises ValueError when the version byte does not match `net`.
    """
    decoded = human_address_type.unpack_base58(address)
    if decoded['version'] != net.BITCOIN_ADDRESS_VERSION:
        raise ValueError('address not for this net!')
    return decoded['pubkey_hash']
def pubkey_to_script2(pubkey):
    """Build a pay-to-pubkey output script: push the 65-byte key, then 0xac."""
    packed = pubkey_type.pack(pubkey)
    return '\x41' + packed + '\xac'
def pubkey_hash_to_script2(pubkey_hash):
    """Build a pay-to-pubkey-hash output script around the 20-byte hash."""
    hashed = ShortHashType().pack(pubkey_hash)
    return '\x76\xa9' + '\x14' + hashed + '\x88\xac'
403 # linked list tracker
class Tracker(object):
    # Forest of shares linked by previous_hash, maintaining chain heads/tails,
    # cached heights, and probabilistic skip lists for ancestor lookup.
    # NOTE(review): many lines of this class are elided from this chunk
    # (including the `def __init__` header and assorted branch/loop headers);
    # inline notes below mark where code is known to be missing, and the
    # apparent nesting of some lines cannot be fully verified.
        self.shares = {} # hash -> share
        self.ids = {} # hash -> (id, height)
        self.reverse_shares = {} # previous_hash -> set of share_hashes

        self.heads = {} # head hash -> tail_hash
        self.tails = {} # tail hash -> set of head hashes
        self.heights = {} # share_hash -> height_to, other_share_hash
        self.skips = {} # share_hash -> skip list

        self.id_generator = itertools.count()
        self.tails_by_id = {}

    def add(self, share):
        # Insert a share, splicing chains together and updating head/tail maps.
        assert not isinstance(share, (int, long, type(None)))
        if share.hash in self.shares:
            return # XXX raise exception?

        # Candidate (id, height) infos from the parent and any already-present
        # children. NOTE(review): the initialization of `infos` is elided.
        parent_id = self.ids.get(share.previous_hash, None)
        children_ids = set(self.ids.get(share2_hash) for share2_hash in self.reverse_shares.get(share.hash, set()))

        if parent_id is not None:
            infos.add((parent_id[0], parent_id[1] + 1))
        for child_id in children_ids:
            infos.add((child_id[0], child_id[1] - 1))
            # NOTE(review): the line below presumably sits under an elided
            # fallback branch that allocates a fresh id; confirm.
            infos.add((self.id_generator.next(), 0))

        self.shares[share.hash] = share
        self.reverse_shares.setdefault(share.previous_hash, set()).add(share.hash)

        # Heads this share now extends: heads whose tail was this share, or
        # the share itself. NOTE(review): the `else:` header is elided.
        if share.hash in self.tails:
            heads = self.tails.pop(share.hash)
            heads = set([share.hash])

        # Find the tail this share attaches to: reuse the parent head's tail,
        # otherwise walk back to the root. NOTE(review): `else:` header elided.
        if share.previous_hash in self.heads:
            tail = self.heads.pop(share.previous_hash)
            #dist, tail = self.get_height_and_last(share.previous_hash) # XXX this should be moved out of the critical area even though it shouldn't matter
            tail = share.previous_hash
            while tail in self.shares:
                tail = self.shares[tail].previous_hash

        self.tails.setdefault(tail, set()).update(heads)
        if share.previous_hash in self.tails[tail]:
            self.tails[tail].remove(share.previous_hash)

        # Re-point affected heads at the tail.
        # NOTE(review): the `for head in heads:` header is elided.
            self.heads[head] = tail

        # Consistency check: rebuild a fresh tracker and compare state.
        # NOTE(review): the `def test` header and the rebuild of tracker `t`
        # are elided from this chunk.
        for s in self.shares.itervalues():

        assert self.shares == t.shares, (self.shares, t.shares)
        assert self.reverse_shares == t.reverse_shares, (self.reverse_shares, t.reverse_shares)
        assert self.heads == t.heads, (self.heads, t.heads)
        assert self.tails == t.tails, (self.tails, t.tails)

    def remove(self, share_hash):
        # Delete a share, repairing the head/tail maps around it.
        assert isinstance(share_hash, (int, long, type(None)))
        if share_hash not in self.shares:
            # NOTE(review): the early-exit body of this guard is elided.
        share = self.shares[share_hash]

        # Case: lone chain -- the share is a head and its parent is a tail.
        if share.hash in self.heads and share.previous_hash in self.tails:
            tail = self.heads.pop(share.hash)
            self.tails[tail].remove(share.hash)
            if not self.tails[share.previous_hash]:
                self.tails.pop(share.previous_hash)
        # Case: the share heads a longer chain.
        elif share.hash in self.heads:
            tail = self.heads.pop(share.hash)
            self.tails[tail].remove(share.hash)
            if self.reverse_shares[share.previous_hash] != set([share.hash]):
                # NOTE(review): the two lines below presumably sit under an
                # elided `else:` branch (parent becomes the new head); confirm.
                self.tails[tail].add(share.previous_hash)
                self.heads[share.previous_hash] = tail
        # Case: the share is a tail -- unsupported; the raise precedes the
        # rest of this branch, leaving it unreachable as written.
        elif share.previous_hash in self.tails:
            raise NotImplementedError() # will break other things..
            heads = self.tails[share.previous_hash]
            if len(self.reverse_shares[share.previous_hash]) > 1:
                raise NotImplementedError()
            del self.tails[share.previous_hash]
                self.heads[head] = share.hash
            self.tails[share.hash] = set(heads)
            raise NotImplementedError()

        # Height-cache and heads/tails repair. NOTE(review): surrounding
        # control flow in this section is partly elided.
        height, tail = self.get_height_and_last(share.hash)

        if share.hash in self.heads:
            my_heads = set([share.hash])
        elif share.previous_hash in self.tails:
            my_heads = self.tails[share.previous_hash]
            # NOTE(review): the lines below presumably sit under an elided
            # `else:` branch that locates heads above this share; confirm.
            some_heads = self.tails[tail]
            some_heads_heights = dict((that_head, self.get_height_and_last(that_head)[0]) for that_head in some_heads)
            my_heads = set(that_head for that_head in some_heads
                if some_heads_heights[that_head] > height and
                self.get_nth_parent_hash(that_head, some_heads_heights[that_head] - height) == share.hash)

        if share.previous_hash != tail:
            self.heads[share.previous_hash] = tail

        for head in my_heads:
            if head != share.hash:
                self.heads[head] = share.hash

        if share.hash in self.heads:
            self.heads.pop(share.hash)

        self.tails[tail].difference_update(my_heads)
        if share.previous_hash != tail:
            self.tails[tail].add(share.previous_hash)
        if not self.tails[tail]:
            # NOTE(review): the cleanup body of this guard is elided.
        if my_heads != set([share.hash]):
            self.tails[share.hash] = set(my_heads) - set([share.hash])

        self.shares.pop(share.hash)
        self.reverse_shares[share.previous_hash].remove(share.hash)
        if not self.reverse_shares[share.previous_hash]:
            self.reverse_shares.pop(share.previous_hash)

        assert self.test() is None

    def get_height_and_last(self, share_hash):
        # Walk back to the chain tail, using and refreshing the heights cache.
        # NOTE(review): the walk-loop header and the height/updates
        # initialization are elided from this chunk.
        assert isinstance(share_hash, (int, long, type(None)))
            if share_hash is None or share_hash not in self.shares:
            updates.append((share_hash, height))
            if share_hash in self.heights:
                height_inc, share_hash = self.heights[share_hash]
                # NOTE(review): the `else:` header above this line is elided.
                height_inc, share_hash = 1, self.shares[share_hash].previous_hash
        # Memoize distance-to-tail for every hash visited on the walk.
        for update_hash, height_then in updates:
            self.heights[update_hash] = height - height_then, share_hash
        #assert (height, share_hash) == self.get_height_and_last2(orig), ((height, share_hash), self.get_height_and_last2(orig))
        return height, share_hash

    def get_height_and_last2(self, share_hash):
        # Slow reference implementation: step parent-by-parent, counting.
        # NOTE(review): the loop header and counter initialization are elided.
        assert isinstance(share_hash, (int, long, type(None)))
            if share_hash not in self.shares:
            share_hash = self.shares[share_hash].previous_hash
        return height, share_hash

    def get_chain_known(self, start_hash):
        assert isinstance(start_hash, (int, long, type(None)))
        '''
        Chain starting with item of hash I{start_hash} of items that this Tracker contains
        '''
        item_hash_to_get = start_hash
        # NOTE(review): the loop header, break, and yield are elided.
        if item_hash_to_get not in self.shares:
        share = self.shares[item_hash_to_get]
        assert not isinstance(share, long)
        item_hash_to_get = share.previous_hash

    def get_chain_to_root(self, start_hash, root=None):
        assert isinstance(start_hash, (int, long, type(None)))
        assert isinstance(root, (int, long, type(None)))
        '''
        Chain of hashes starting with share_hash of shares to the root (doesn't include root)
        Raises an error if one is missing
        '''
        share_hash_to_get = start_hash
        while share_hash_to_get != root:
            share = self.shares[share_hash_to_get]
            # NOTE(review): the yield statement in this loop is elided.
            share_hash_to_get = share.previous_hash

    def get_best_hash(self):
        '''
        Returns hash of item with the most items in its chain
        '''
        # NOTE(review): an empty-heads guard above this line is elided.
        return max(self.heads, key=self.get_height_and_last)

    def get_highest_height(self):
        # Height of the tallest head, or 0 when the tracker is empty.
        return max(self.get_height_and_last(head)[0] for head in self.heads) if self.heads else 0

    def get_nth_parent_hash(self, item_hash, n):
        # Ancestor lookup accelerated by skip lists with geometrically
        # distributed levels. NOTE(review): several control-flow lines of
        # this method are elided; nesting below is approximate.
            raise ValueError('n must be >= 0')

        if item_hash not in self.skips:
            # New skip entry: random level plus the direct-parent link.
            self.skips[item_hash] = math.geometric(.5), [(1, self.shares[item_hash].previous_hash)]
        skip_length, skip = self.skips[item_hash]

        for i in xrange(skip_length):
            n_then, that_hash = updates.pop(i)
            x, y = self.skips[that_hash]
            y.append((n_then - n, item_hash))

        for i in xrange(len(skip), skip_length):
            updates[i] = n, item_hash

        for i, (dist, then_hash) in enumerate(reversed(skip)):
            raise AssertionError()

        item_hash = then_hash

    def get_nth_parent2(self, item_hash, n):
        # Slow reference implementation: follow previous_hash links directly.
        # NOTE(review): the surrounding loop/return of this method is elided.
        x = self.shares[item_hash].previous_hash

    def distance_up_to_branch(self, item_hash, max_dist=None):
        # NOTE(review): the body of this method is elided from this chunk.
if __name__ == '__main__':
    # Ad-hoc manual test harness for Tracker.
    # NOTE(review): several lines are elided from this chunk, including the
    # tracker construction (`t = Tracker()`), loop headers, and the
    # try-statement paired with the `except` below.
    class FakeShare(object):
        # Minimal stand-in for a share: just the two linkage attributes.
        # NOTE(review): the `self.hash = hash` assignment is elided.
        def __init__(self, hash, previous_hash):
            self.previous_hash = previous_hash

    # Build a simple 100-share linear chain rooted at None.
    for i in xrange(100):
        t.add(FakeShare(i, i - 1 if i > 0 else None))

    print "HEADS", t.heads
    print "TAILS", t.tails

    # Fuzz: add shares with random parents, then remove random shares.
    for i in xrange(random.randrange(100)):
        x = random.choice(list(t.shares) + [None])
        t.add(FakeShare(i, x))

        x = random.choice(list(t.shares))
        print "DEL", x, t.__dict__
        # remove() raises NotImplementedError for unsupported topologies.
        except NotImplementedError:
            print "aborted; not implemented"

    print "HEADS", t.heads
    print "TAILS", t.tails

    #for share_hash, share in sorted(t.shares.iteritems()):
    #    print share_hash, share.previous_hash, t.heads.get(share_hash), t.tails.get(share_hash)

    import sys;sys.exit()

    # Dead code below the exit: skip-list inspection output.
    print t.get_nth_parent_hash(9000, 5000)
    print t.get_nth_parent_hash(9001, 412)
    #print t.get_nth_parent_hash(90, 51)

    for share_hash in sorted(t.shares):
        print str(share_hash).rjust(4),
        x = t.skips.get(share_hash, None)
        print str(x[0]).rjust(4),
        print str(a).rjust(10),
712 # network definitions
class Mainnet(object):
    # Bitcoin mainnet constants.
    BITCOIN_P2P_PREFIX = 'f9beb4d9'.decode('hex')  # p2p message prefix bytes
    BITCOIN_P2P_PORT = 8333
    BITCOIN_ADDRESS_VERSION = 0  # base58 address version byte
class Testnet(object):
    # Bitcoin testnet constants. NOTE(review): this class reaches the end of
    # the visible chunk and may continue in the full file.
    BITCOIN_P2P_PREFIX = 'fabfb5da'.decode('hex')  # p2p message prefix bytes
    BITCOIN_P2P_PORT = 18333
    BITCOIN_ADDRESS_VERSION = 111  # base58 address version byte