1 from __future__ import division
6 from . import base58, skiplists
7 from p2pool.util import bases, math, variable
# Raised when fewer bytes remain than a read requested -- raise site is on a
# line missing from this listing (presumably inside read() below); confirm.
10 class EarlyEnd(Exception):
# Raised when unpacking leaves trailing unconsumed bytes -- raise site not
# visible in this listing; confirm.  NOTE(review): both class bodies
# (original lines 11-12, 14-15) are missing from this listing.
13 class LateEnd(Exception):
# Consume `length` bytes from a (data, pos) cursor tuple; returns the bytes
# read and the advanced cursor.  Uses Python-2 tuple parameter unpacking.
# NOTE(review): the line between the length check and the return (original
# line 19) is missing from this listing -- presumably `raise EarlyEnd()`.
16 def read((data, pos), length):
17 data2 = data[pos:pos + length]
18 if len(data2) != length:
20 return data2, (data, pos + length)
def size(file):
    """Return the number of bytes not yet consumed from a (data, pos) cursor tuple.

    The original used Python-2-only tuple parameter unpacking
    (``def size((data, pos))``, removed by PEP 3113); explicit unpacking is
    equivalent on Python 2 and also valid on Python 3.
    """
    data, pos = file
    return len(data) - pos
26 # the same data can have only one unpacked representation, but multiple packed binary representations
29 # return hash(tuple(self.__dict__.items()))
31 #def __eq__(self, other):
32 # if not isinstance(other, Type):
33 # raise NotImplementedError()
34 # return self.__dict__ == other.__dict__
# Fragments of Type's pack/unpack round-trip helpers.  NOTE(review): this
# listing omits many interior lines (original 38-46, 48-56, 59-60, 62, 65-68,
# 70-71, 74-77), so the method bodies below are incomplete.
36 def _unpack(self, data):
# Read one object starting at offset 0 of `data`.
37 obj, (data2, pos) = self.read((data, 0))
# (inside _pack, whose `def` line is not visible) serialize via write().
47 f = self.write(None, obj)
# unpack() verifies that re-packing the unpacked object reproduces it --
# enforcing the one-unpacked-representation invariant noted above.
57 def unpack(self, data):
58 obj = self._unpack(data)
61 data2 = self._pack(obj)
63 if self._unpack(data2) != obj:
64 raise AssertionError()
# (inside pack) symmetric round-trip check: packed data must unpack back.
69 data = self._pack(obj)
72 if self._unpack(data) != obj:
73 raise AssertionError()
def pack_base58(self, obj):
    """Pack obj with this type, then base58-encode the resulting bytes."""
    packed = self.pack(obj)
    return base58.base58_encode(packed)
def unpack_base58(self, base58_data):
    """Base58-decode the string, then unpack the raw bytes with this type."""
    raw = base58.base58_decode(base58_data)
    return self.unpack(raw)
def hash160(self, obj):
    """RIPEMD-160 of SHA-256 of the packed object, returned via ShortHashType."""
    packed = self.pack(obj)
    digest = hashlib.new('ripemd160', hashlib.sha256(packed).digest()).digest()
    return ShortHashType().unpack(digest)
def hash256(self, obj):
    """Double SHA-256 of the packed object, returned via HashType."""
    first_round = hashlib.sha256(self.pack(obj)).digest()
    return HashType().unpack(hashlib.sha256(first_round).digest())
# Bitcoin variable-length integer: small values are one raw byte; larger
# values get a 0xfd/0xfe/0xff marker followed by 2/4/8 little-endian bytes.
# NOTE(review): several interior lines (original 93, 95-98, 100, 102, 104,
# 108, 110, 112, 118, 120-121) are missing from this listing.
91 class VarIntType(Type):
92 # redundancy doesn't matter here because bitcoin and p2pool both reencode before hashing
94 data, file = read(file, 1)
# Dispatch on the marker byte to pick the payload format/width.
99 desc, length = '<H', 2
101 desc, length = '<I', 4
103 desc, length = '<Q', 8
105 raise AssertionError()
106 data, file = read(file, length)
107 return struct.unpack(desc, data)[0], file
109 def write(self, file, item):
111 file = file, struct.pack('<B', item)
113 file = file, struct.pack('<BH', 0xfd, item)
114 elif item <= 0xffffffff:
115 file = file, struct.pack('<BI', 0xfe, item)
116 elif item <= 0xffffffffffffffff:
117 file = file, struct.pack('<BQ', 0xff, item)
119 raise ValueError('int too large for varint')
class VarStrType(Type):
    """A byte string serialized as a varint length prefix followed by the raw bytes."""

    _inner_size = VarIntType()

    def read(self, file):
        # Length prefix first, then exactly that many bytes.
        n, file = self._inner_size.read(file)
        return read(file, n)

    def write(self, file, item):
        return self._inner_size.write(file, len(item)), item
# Fixed-width byte string; the width is set at construction time.
# NOTE(review): the self.length assignment (original 134-135) and write()'s
# return statement (original 142-143) are missing from this listing.
132 class FixedStrType(Type):
133 def __init__(self, length):
136 def read(self, file):
137 return read(file, self.length)
139 def write(self, file, item):
140 if len(item) != self.length:
141 raise ValueError('incorrect length item!')
# Maps symbolic keys to wire values through an inner type: read() translates
# wire value -> key, write() translates key -> wire value.
# NOTE(review): __init__'s assignments and the reverse-lookup (keys) dict
# construction (original 146-149, 151, 153-154) are missing from this listing.
144 class EnumType(Type):
145 def __init__(self, inner, values):
# Duplicate wire values would make decoding ambiguous.
150 for k, v in values.iteritems():
152 raise ValueError('duplicate value in values')
155 def read(self, file):
156 data, file = self.inner.read(file)
157 return self.keys[data], file
159 def write(self, file, item):
160 return self.inner.write(file, self.values[item])
# 256-bit hash: 32 bytes little-endian on the wire, exposed as a Python int.
# Left unmodified -- byte-order/codec details rely on Python-2-only str hex
# codecs and a print statement.
162 class HashType(Type):
163 def read(self, file):
164 data, file = read(file, 256//8)
# Reverse to big-endian before parsing as hex.
165 return int(data[::-1].encode('hex'), 16), file
167 def write(self, file, item):
168 if not 0 <= item < 2**256:
169 raise ValueError('invalid hash value - %r' % (item,))
# Heuristic warning only -- a 160-bit-sized value suggests the caller
# wanted ShortHashType.
170 if item != 0 and item < 2**160:
171 print 'Very low hash value - maybe you meant to use ShortHashType? %x' % (item,)
# 64 hex digits -> 32 bytes, reversed back to little-endian wire order.
172 return file, ('%064x' % (item,)).decode('hex')[::-1]
# 160-bit hash (e.g. the output of hash160 above): 20 bytes little-endian on
# the wire, exposed as a Python int.  Same Py2-only codec approach as HashType.
174 class ShortHashType(Type):
175 def read(self, file):
176 data, file = read(file, 160//8)
177 return int(data[::-1].encode('hex'), 16), file
179 def write(self, file, item):
180 if not 0 <= item < 2**160:
181 raise ValueError('invalid hash value - %r' % (item,))
# 40 hex digits -> 20 bytes, reversed to little-endian wire order.
182 return file, ('%040x' % (item,)).decode('hex')[::-1]
# Homogeneous list: a varint element count followed by each element serialized
# with self.type.  NOTE(review): __init__'s body, the result-accumulator
# setup, the loop header in write() and both returns (original 188-189, 192,
# 195-197, 200, 202-203) are missing from this listing.
184 class ListType(Type):
185 _inner_size = VarIntType()
187 def __init__(self, type):
190 def read(self, file):
191 length, file = self._inner_size.read(file)
193 for i in xrange(length):
194 item, file = self.type.read(file)
198 def write(self, file, item):
199 file = self._inner_size.write(file, len(item))
201 file = self.type.write(file, subitem)
# Wraps a single struct format string (e.g. '<I'); the byte length is
# precomputed from the format.  NOTE(review): the self.desc assignment and
# both return statements (original 206, 212-213, 219-220) are missing from
# this listing.
204 class StructType(Type):
205 def __init__(self, desc):
207 self.length = struct.calcsize(self.desc)
209 def read(self, file):
210 data, file = read(file, self.length)
211 res, = struct.unpack(self.desc, data)
214 def write(self, file, item):
215 data = struct.pack(self.desc, item)
216 if struct.unpack(self.desc, data)[0] != item:
217 # special test because struct doesn't error on some overflows
218 raise ValueError('''item didn't survive pack cycle (%r)''' % (item,))
# 16-byte address field; only IPv4-mapped addresses (::ffff:a.b.c.d prefix)
# are accepted, exposed as a dotted-quad string.  NOTE(review): the condition
# validating `bits` and write()'s return (original 227, 230, 234-236) are
# missing from this listing.
221 class IPV6AddressType(Type):
222 def read(self, file):
223 data, file = read(file, 16)
224 if data[:12] != '00000000000000000000ffff'.decode('hex'):
225 raise ValueError('ipv6 addresses not supported yet')
226 return '.'.join(str(ord(x)) for x in data[12:]), file
228 def write(self, file, item):
229 bits = map(int, item.split('.'))
231 raise ValueError('invalid address: %r' % (bits,))
232 data = '00000000000000000000ffff'.decode('hex') + ''.join(chr(x) for x in bits)
233 assert len(data) == 16, len(data)
# Factory for lightweight record classes cached by their (sorted) field names;
# instances behave like fixed-schema dicts.  NOTE(review): the _record_types
# cache definition, the __slots__ assignment, the keys() def line and parts of
# __eq__ (original 236-237, 240-241, 244, 249, 252, 259) are missing from
# this listing.
238 def get_record(fields):
# Sort so field order doesn't create distinct cache entries.
239 fields = tuple(sorted(fields))
242 if fields not in _record_types:
243 class _Record(object):
# dict-style access delegates to attribute access.
245 def __getitem__(self, key):
246 return getattr(self, key)
247 def __setitem__(self, key, value):
248 setattr(self, key, value)
250 # for field in self.__slots__:
251 # yield field, getattr(self, field)
253 return self.__slots__
254 def __eq__(self, other):
# Records compare equal to plain dicts with the same contents.
255 if isinstance(other, dict):
256 return dict(self) == other
257 elif isinstance(other, _Record):
258 return all(self[k] == other[k] for k in self.keys())
260 def __ne__(self, other):
261 return not (self == other)
262 _record_types[fields] = _Record
263 return _record_types[fields]()
# Ordered sequence of (name, type) fields serialized back-to-back; read()
# fills a record obtained from get_record().  NOTE(review): __init__'s body
# and both return statements (original 267-268, 273-274, 278-279) are missing
# from this listing.
265 class ComposedType(Type):
266 def __init__(self, fields):
269 def read(self, file):
270 item = get_record(k for k, v in self.fields)
271 for key, type_ in self.fields:
272 item[key], file = type_.read(file)
275 def write(self, file, item):
276 for key, type_ in self.fields:
277 file = type_.write(file, item[key])
# Wraps an inner type with a trailing 4-byte double-SHA256 checksum (as used
# by base58check).  NOTE(review): __init__'s body and read()'s return
# (original 282-283, 287, 291-293) are missing from this listing.
280 class ChecksummedType(Type):
281 def __init__(self, inner):
284 def read(self, file):
285 obj, file = self.inner.read(file)
# Re-pack the object to recompute the checksummed byte span.
286 data = self.inner.pack(obj)
288 checksum, file = read(file, 4)
289 if checksum != hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]:
290 raise ValueError('invalid checksum')
294 def write(self, file, item):
295 data = self.inner.pack(item)
296 return (file, data), hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]
# Bitcoin 'compact bits' difficulty encoding: a 32-bit value (1 exponent byte
# + 3 mantissa bytes) <-> the full 256-bit target integer.
# NOTE(review): several interior lines (original 302, 306, 309-310, 313, 316,
# 319, 322-323, 325, 328, 332, 335, 338-339) are missing from this listing.
298 class FloatingIntegerType(Type):
299 # redundancy doesn't matter here because bitcoin checks binary bits against its own computed bits
300 # so it will always be encoded 'normally' in blocks (the way bitcoin does it)
301 _inner = StructType('<I')
303 def read(self, file):
304 bits, file = self._inner.read(file)
305 target = self._bits_to_target(bits)
# Reject encodings that don't round-trip to the same bits.
307 if self._target_to_bits(target) != bits:
308 raise ValueError('bits in non-canonical form')
311 def write(self, file, item):
312 return self._inner.write(file, self._target_to_bits(item))
314 def truncate_to(self, x):
# Round-trip through the compact encoding to drop low-order precision.
315 return self._bits_to_target(self._target_to_bits(x, _check=False))
317 def _bits_to_target(self, bits2):
# mantissa << 8*(exponent - 3)
318 target = math.shift_left(bits2 & 0x00ffffff, 8 * ((bits2 >> 24) - 3))
320 assert target == self._bits_to_target1(struct.pack('<I', bits2))
321 assert self._target_to_bits(target, _check=False) == bits2
324 def _bits_to_target1(self, bits):
# Reference implementation used only by the assertion above.
326 length = ord(bits[0])
327 return bases.string_to_natural((bits[1:] + '\0'*length)[:length])
329 def _target_to_bits(self, target, _check=True):
330 n = bases.natural_to_string(target)
# A high leading bit would read as a sign bit; presumably padded on the
# missing original line 332 -- confirm.
331 if n and ord(n[0]) >= 128:
333 bits2 = (chr(len(n)) + (n + 3*chr(0))[:3])[::-1]
334 bits = struct.unpack('<I', bits2)[0]
336 if self._bits_to_target(bits) != target:
337 raise ValueError(repr((target, self._bits_to_target(bits, _check=False))))
# Maps one sentinel wire value to Python None; writing the sentinel itself is
# an error so the encoding stays unambiguous.  NOTE(review): the self.inner
# assignment (original 343-344) and a blank/continuation (348) are missing
# from this listing.
340 class PossiblyNone(Type):
341 def __init__(self, none_value, inner):
342 self.none_value = none_value
345 def read(self, file):
346 value, file = self.inner.read(file)
347 return None if value == self.none_value else value, file
349 def write(self, file, item):
350 if item == self.none_value:
351 raise ValueError('none_value used')
352 return self.inner.write(file, self.none_value if item is None else item)
# Wire-format definitions for bitcoin p2p structures (network address,
# transaction, block header, block, merkle record).  NOTE(review): the closing
# brackets of these literals (original 358-359, 366, 369, 373, 375-376,
# 384-385, 389-391, 395-396) are missing from this listing.
354 address_type = ComposedType([
355 ('services', StructType('<Q')),
356 ('address', IPV6AddressType()),
# Port is big-endian, unlike the little-endian fields above.
357 ('port', StructType('>H')),
360 tx_type = ComposedType([
361 ('version', StructType('<I')),
362 ('tx_ins', ListType(ComposedType([
# The (hash=0, index=2**32-1) sentinel previous-output is exposed as None.
363 ('previous_output', PossiblyNone(dict(hash=0, index=2**32 - 1), ComposedType([
364 ('hash', HashType()),
365 ('index', StructType('<I')),
367 ('script', VarStrType()),
368 ('sequence', PossiblyNone(2**32 - 1, StructType('<I'))),
370 ('tx_outs', ListType(ComposedType([
371 ('value', StructType('<Q')),
372 ('script', VarStrType()),
374 ('lock_time', StructType('<I')),
377 block_header_type = ComposedType([
378 ('version', StructType('<I')),
# A previous-block hash of 0 is exposed as None.
379 ('previous_block', PossiblyNone(0, HashType())),
380 ('merkle_root', HashType()),
381 ('timestamp', StructType('<I')),
382 ('target', FloatingIntegerType()),
383 ('nonce', StructType('<I')),
386 block_type = ComposedType([
387 ('header', block_header_type),
388 ('txs', ListType(tx_type)),
392 merkle_record_type = ComposedType([
393 ('left', HashType()),
394 ('right', HashType()),
# Compute the merkle root of a transaction list by repeated pairwise hashing;
# an odd trailing element is paired with itself (right is None -> left is
# reused).  NOTE(review): the empty-list base case and the final return
# (original 398-399, 404-405) are missing from this listing.
397 def merkle_hash(tx_list):
400 hash_list = map(tx_type.hash256, tx_list)
401 while len(hash_list) > 1:
402 hash_list = [merkle_record_type.hash256(dict(left=left, right=left if right is None else right))
403 for left, right in zip(hash_list[::2], hash_list[1::2] + [None])]
def target_to_average_attempts(target):
    """Expected number of hash attempts to find a hash at or below `target`.

    Of the 2**256 possible hash values, target+1 qualify, so the expected
    attempt count is 2**256 / (target + 1), using integer division.
    """
    hash_space_size = 2**256
    return hash_space_size // (target + 1)
def tx_get_sigop_count(tx):
    """Total signature-operation count over every input and output script of tx."""
    input_sigops = sum(script.get_sigop_count(tx_in['script']) for tx_in in tx['tx_ins'])
    output_sigops = sum(script.get_sigop_count(tx_out['script']) for tx_out in tx['tx_outs'])
    return input_sigops + output_sigops
# Human-readable (base58check) address payload: a one-byte version plus the
# 160-bit pubkey hash, wrapped with a 4-byte checksum by ChecksummedType.
# NOTE(review): the closing brackets (original 419-420, 422) are missing from
# this listing.
416 human_address_type = ChecksummedType(ComposedType([
417 ('version', StructType('<B')),
418 ('pubkey_hash', ShortHashType()),
# 65-byte (presumably uncompressed SEC-format) public key -- confirm.
421 pubkey_type = FixedStrType(65)
def pubkey_hash_to_address(pubkey_hash, net):
    """Base58check-encode a pubkey hash using net's address version byte."""
    payload = dict(version=net.BITCOIN_ADDRESS_VERSION, pubkey_hash=pubkey_hash)
    return human_address_type.pack_base58(payload)
def pubkey_to_address(pubkey, net):
    """Hash a public key and render it as a base58check address for `net`."""
    pubkey_hash = pubkey_type.hash160(pubkey)
    return pubkey_hash_to_address(pubkey_hash, net)
def address_to_pubkey_hash(address, net):
    """Decode a base58check address, verify its version byte matches `net`,
    and return the embedded pubkey hash.

    Raises ValueError when the version byte belongs to a different network.
    """
    decoded = human_address_type.unpack_base58(address)
    if decoded['version'] != net.BITCOIN_ADDRESS_VERSION:
        raise ValueError('address not for this net!')
    return decoded['pubkey_hash']
def pubkey_to_script2(pubkey):
    """Build a pay-to-pubkey output script: the packed 65-byte pubkey wrapped
    in the fixed \\x41 prefix and \\xac suffix bytes."""
    packed_pubkey = pubkey_type.pack(pubkey)
    return '\x41' + packed_pubkey + '\xac'
def pubkey_hash_to_script2(pubkey_hash):
    """Build a pay-to-pubkey-hash output script: the packed 20-byte hash
    wrapped in the fixed \\x76\\xa9\\x14 prefix and \\x88\\xac suffix bytes."""
    packed_hash = ShortHashType().pack(pubkey_hash)
    return '\x76\xa9' + '\x14' + packed_hash + '\x88\xac'
# Classify a raw output script as pay-to-pubkey, pay-to-pubkey-hash, or
# unknown, returning a human-readable description.  NOTE(review): the listing
# omits original lines 444, 447-449, 452-453, 456-458, 461 -- presumably
# try/except guards around the speculative unpacking; confirm.
443 def script2_to_human(script2, net):
# Try pay-to-pubkey: strip the wrapper bytes and rebuild for comparison.
445 pubkey = script2[1:-1]
446 script2_test = pubkey_to_script2(pubkey)
450 if script2_test == script2:
451 return 'Pubkey. Address: %s' % (pubkey_to_address(pubkey, net),)
# Try pay-to-pubkey-hash: extract the 20-byte hash and rebuild.
454 pubkey_hash = ShortHashType().unpack(script2[3:-2])
455 script2_test2 = pubkey_hash_to_script2(pubkey_hash)
459 if script2_test2 == script2:
460 return 'Address. Address: %s' % (pubkey_hash_to_address(pubkey_hash, net),)
462 return 'Unknown. Script: %s' % (script2.encode('hex'),)
464 # linked list tracker
# Tracks a forest of share chains linked by previous_hash, maintaining
# head/tail indexes plus cached heights.  NOTE(review): the __init__ def line
# and several initializers (original 467, 471, 474, 476-477, 480-481, 483,
# 486) are missing from this listing.
466 class Tracker(object):
468 self.shares = {} # hash -> share
469 #self.ids = {} # hash -> (id, height)
470 self.reverse_shares = {} # previous_hash -> set of share_hashes
472 self.heads = {} # head hash -> tail_hash
473 self.tails = {} # tail hash -> set of head hashes
475 self.heights = {} # share_hash -> height_to, other_share_hash
478 self.id_generator = itertools.count()
479 self.tails_by_id = {}
# Skiplist-backed ancestor lookup: get_nth_parent_hash(hash, n).
482 self.get_nth_parent_hash = skiplists.DistanceSkipList(self)
# Observers notified on every add/remove.
484 self.added = variable.Event()
485 self.removed = variable.Event()
# add(): insert a share, splicing it into the head/tail chain indexes.
# NOTE(review): many interior lines are missing from this listing (original
# 491-492, 495, 500, 502, 504-505, 508, 511, 513, 516, 521, 525-526, 528,
# 530); the visible id-tracking code is partial.
487 def add(self, share):
488 assert not isinstance(share, (int, long, type(None)))
489 if share.hash in self.shares:
490 raise ValueError('share already present')
493 parent_id = self.ids.get(share.previous_hash, None)
494 children_ids = set(self.ids.get(share2_hash) for share2_hash in self.reverse_shares.get(share.hash, set()))
496 if parent_id is not None:
497 infos.add((parent_id[0], parent_id[1] + 1))
498 for child_id in children_ids:
499 infos.add((child_id[0], child_id[1] - 1))
501 infos.add((self.id_generator.next(), 0))
503 self.ids[share.hash] = chosen
506 self.shares[share.hash] = share
507 self.reverse_shares.setdefault(share.previous_hash, set()).add(share.hash)
# If this share was a known tail, its dependent heads now hang off it.
509 if share.hash in self.tails:
510 heads = self.tails.pop(share.hash)
512 heads = set([share.hash])
514 if share.previous_hash in self.heads:
515 tail = self.heads.pop(share.previous_hash)
517 #dist, tail = self.get_height_and_last(share.previous_hash) # XXX this should be moved out of the critical area even though it shouldn't matter
# Walk backwards to find the chain's tail (first unknown ancestor).
518 tail = share.previous_hash
519 while tail in self.shares:
520 tail = self.shares[tail].previous_hash
522 self.tails.setdefault(tail, set()).update(heads)
523 if share.previous_hash in self.tails[tail]:
524 self.tails[tail].remove(share.previous_hash)
527 self.heads[head] = tail
529 self.added.happened(share)
# test(): rebuild a fresh tracker from the same shares and check the derived
# indexes agree.  NOTE(review): the def line and setup (original 531-532,
# 534-535) are missing from this listing.
533 for s in self.shares.itervalues():
536 assert self.shares == t.shares, (self.shares, t.shares)
537 assert self.reverse_shares == t.reverse_shares, (self.reverse_shares, t.reverse_shares)
538 assert self.heads == t.heads, (self.heads, t.heads)
539 assert self.tails == t.tails, (self.tails, t.tails)
# remove(): delete a share, re-stitching the head/tail indexes and patching
# the cached heights table.  NOTE(review): many interior lines are missing
# from this listing (original 544, 547-548, 558-559, 567, 569, 572, 574-575,
# 579-580, 582, 586, 592-593, 595, 600, 606, 609, 613-615, 618-619, 624,
# 627-628, 633) -- read the visible branches as an outline, not full logic.
541 def remove(self, share_hash):
542 assert isinstance(share_hash, (int, long, type(None)))
543 if share_hash not in self.shares:
545 share = self.shares[share_hash]
546 children = self.reverse_shares.get(share_hash, set())
# Case: share is both a head and directly above a tail.
549 if share.hash in self.heads and share.previous_hash in self.tails:
550 tail = self.heads.pop(share.hash)
551 self.tails[tail].remove(share.hash)
552 if not self.tails[share.previous_hash]:
553 self.tails.pop(share.previous_hash)
# Case: share is a head only; its parent may become the new head.
554 elif share.hash in self.heads:
555 tail = self.heads.pop(share.hash)
556 self.tails[tail].remove(share.hash)
557 if self.reverse_shares[share.previous_hash] != set([share.hash]):
560 self.tails[tail].add(share.previous_hash)
561 self.heads[share.previous_hash] = tail
# Case: share sits directly above a tail.
562 elif share.previous_hash in self.tails:
563 #raise NotImplementedError() # will break other things..
564 heads = self.tails[share.previous_hash]
565 if len(self.reverse_shares[share.previous_hash]) > 1:
566 raise NotImplementedError()
568 del self.tails[share.previous_hash]
570 self.heads[head] = share.hash
571 self.tails[share.hash] = set(heads)
573 raise NotImplementedError()
# Patch cached height entries that pointed through the removed share.
576 for share_hash2 in self.heights:
577 height_to, other_share_hash, work_inc = self.heights[share_hash2]
578 if other_share_hash != share.previous_hash:
581 if len(children) == 1:
583 other_share_hash = share.hash
584 work_inc -= target_to_average_attempts(share.target)
585 self.heights[share_hash2] = height_to, other_share_hash, work_inc
587 to_remove.add(share_hash2)
588 for share_hash2 in to_remove:
589 del self.heights[share_hash2]
590 if share.hash in self.heights:
591 del self.heights[share.hash]
594 height, tail = self.get_height_and_last(share.hash)
596 if share.hash in self.heads:
597 my_heads = set([share.hash])
598 elif share.previous_hash in self.tails:
599 my_heads = self.tails[share.previous_hash]
# Otherwise find the heads whose chains pass through the removed share.
601 some_heads = self.tails[tail]
602 some_heads_heights = dict((that_head, self.get_height_and_last(that_head)[0]) for that_head in some_heads)
603 my_heads = set(that_head for that_head in some_heads
604 if some_heads_heights[that_head] > height and
605 self.get_nth_parent_hash(that_head, some_heads_heights[that_head] - height) == share.hash)
607 if share.previous_hash != tail:
608 self.heads[share.previous_hash] = tail
610 for head in my_heads:
611 if head != share.hash:
612 self.heads[head] = share.hash
616 if share.hash in self.heads:
617 self.heads.pop(share.hash)
620 self.tails[tail].difference_update(my_heads)
621 if share.previous_hash != tail:
622 self.tails[tail].add(share.previous_hash)
623 if not self.tails[tail]:
625 if my_heads != set([share.hash]):
626 self.tails[share.hash] = set(my_heads) - set([share.hash])
# Finally drop the share itself from both lookup tables.
629 self.shares.pop(share.hash)
630 self.reverse_shares[share.previous_hash].remove(share.hash)
631 if not self.reverse_shares[share.previous_hash]:
632 self.reverse_shares.pop(share.previous_hash)
634 #assert self.test() is None
635 self.removed.happened(share)
# Convenience accessors over get_height_work_and_last().  NOTE(review): each
# return line (original 639-640, 643-644, 647-648, 651-652) is missing from
# this listing.
637 def get_height(self, share_hash):
638 height, work, last = self.get_height_work_and_last(share_hash)
641 def get_work(self, share_hash):
642 height, work, last = self.get_height_work_and_last(share_hash)
645 def get_last(self, share_hash):
646 height, work, last = self.get_height_work_and_last(share_hash)
649 def get_height_and_last(self, share_hash):
650 height, work, last = self.get_height_work_and_last(share_hash)
# Walk ancestors accumulating height and work, using self.heights as a
# memoized shortcut table that is updated on the way out.  NOTE(review):
# the loop setup and break (original 655-659, 661, 665, 667-668, 672) are
# missing from this listing.
653 def get_height_work_and_last(self, share_hash):
654 assert isinstance(share_hash, (int, long, type(None)))
660 if share_hash is None or share_hash not in self.shares:
662 updates.append((share_hash, height, work))
663 if share_hash in self.heights:
664 height_inc, share_hash, work_inc = self.heights[share_hash]
666 height_inc, share_hash, work_inc = 1, self.shares[share_hash].previous_hash, target_to_average_attempts(self.shares[share_hash].target)
# Record a shortcut from every visited hash to the terminal hash.
669 for update_hash, height_then, work_then in updates:
670 self.heights[update_hash] = height - height_then, share_hash, work - work_then
671 return height, work, share_hash
# Generators that walk chains backwards via previous_hash.  NOTE(review):
# their yield statements (original 675, 677, 679, 681, 684, 686, 690, 693,
# 697, 699) are missing from this listing.
673 def get_chain_known(self, start_hash):
674 assert isinstance(start_hash, (int, long, type(None)))
676 Chain starting with item of hash I{start_hash} of items that this Tracker contains
678 item_hash_to_get = start_hash
680 if item_hash_to_get not in self.shares:
682 share = self.shares[item_hash_to_get]
683 assert not isinstance(share, long)
685 item_hash_to_get = share.previous_hash
687 def get_chain_to_root(self, start_hash, root=None):
688 assert isinstance(start_hash, (int, long, type(None)))
689 assert isinstance(root, (int, long, type(None)))
691 Chain of hashes starting with share_hash of shares to the root (doesn't include root)
692 Raises an error if one is missing
694 share_hash_to_get = start_hash
695 while share_hash_to_get != root:
696 share = self.shares[share_hash_to_get]
698 share_hash_to_get = share.previous_hash
# Head with the greatest (height, tail) ordering.  NOTE(review): guard lines
# (original 701, 703-705, 707) are missing from this listing.
700 def get_best_hash(self):
702 Returns hash of item with the most items in its chain
706 return max(self.heads, key=self.get_height_and_last)
def get_highest_height(self):
    """Return the height of the tallest known head, or 0 when there are none."""
    if not self.heads:
        return 0
    return max(self.get_height_and_last(head)[0] for head in self.heads)
class FakeShare(object):
    """Test stand-in for a real share: every keyword argument becomes an attribute."""
    def __init__(self, **kwargs):
        for name in kwargs:
            setattr(self, name, kwargs[name])
# Ad-hoc smoke test: build a 10000-share chain, randomly add/remove shares,
# and dump tracker state.  NOTE(review): many lines are missing from this
# listing (original 716-718, 721-723, 726-733, 736, 738, 741-742, 745-746,
# 749, 752, 754, 758, 762, 764, 766-767), including the Tracker construction
# and the surrounding try/random scaffolding.
715 if __name__ == '__main__':
719 for i in xrange(10000):
720 t.add(FakeShare(hash=i, previous_hash=i - 1 if i > 0 else None))
724 print 'HEADS', t.heads
725 print 'TAILS', t.tails
734 for i in xrange(random.randrange(100)):
735 x = random.choice(list(t.shares) + [None])
# NOTE(review): FakeShare(i, x) passes positional arguments, but
# FakeShare.__init__ accepts only keyword arguments -- this would raise
# TypeError if reached; likely meant FakeShare(hash=i, previous_hash=x).
# Confirm before changing.
737 t.add(FakeShare(i, x))
739 x = random.choice(list(t.shares))
740 print 'DEL', x, t.__dict__
743 except NotImplementedError:
744 print 'aborted; not implemented'
747 print 'HEADS', t.heads
748 print 'TAILS', t.tails
750 #for share_hash, share in sorted(t.shares.iteritems()):
751 # print share_hash, share.previous_hash, t.heads.get(share_hash), t.tails.get(share_hash)
753 #import sys;sys.exit()
755 print t.get_nth_parent_hash(9000, 5000)
756 print t.get_nth_parent_hash(9001, 412)
757 #print t.get_nth_parent_hash(90, 51)
759 for share_hash in sorted(t.shares):
760 print str(share_hash).rjust(4),
761 x = t.skips.get(share_hash, None)
763 print str(x[0]).rjust(4),
765 print str(a).rjust(10),
768 # network definitions
# Bitcoin mainnet constants: p2p message magic prefix, default port, and the
# version byte used by human_address_type.
770 class Mainnet(object):
771 BITCOIN_P2P_PREFIX = 'f9beb4d9'.decode('hex')
772 BITCOIN_P2P_PORT = 8333
773 BITCOIN_ADDRESS_VERSION = 0
# Bitcoin testnet constants, parallel to Mainnet above.
775 class Testnet(object):
776 BITCOIN_P2P_PREFIX = 'fabfb5da'.decode('hex')
777 BITCOIN_P2P_PORT = 18333
778 BITCOIN_ADDRESS_VERSION = 111