1 from __future__ import division
7 from . import base58, skiplists
8 from p2pool.util import bases, math, variable
class EarlyEnd(Exception):
    """Signals that input ended before a read completed (see `read`).

    NOTE(review): the raise site is elided from this view; confirm it is
    raised from `read` when fewer than `length` bytes remain.
    """
class LateEnd(Exception):
    """Signals leftover, unconsumed bytes after unpacking completed.

    NOTE(review): the raise site is elided from this view; presumably raised
    by `Type._unpack` when trailing data remains — confirm.
    """
def read(file, length):
    """Consume exactly `length` bytes from a (data, pos) cursor.

    Returns (bytes, new_cursor) where new_cursor is (data, pos + length).
    Raises EarlyEnd when fewer than `length` bytes remain — slicing never
    fails on a short buffer, so the length must be checked explicitly.
    """
    data, pos = file
    data2 = data[pos:pos + length]
    if len(data2) != length:
        raise EarlyEnd()
    return data2, (data, pos + length)
def size(file):
    """Return the number of unread bytes left in a (data, pos) cursor."""
    data, pos = file
    return len(data) - pos
27 # the same data can have only one unpacked representation, but multiple packed binary representations
30 # return hash(tuple(self.__dict__.items()))
32 #def __eq__(self, other):
33 # if not isinstance(other, Type):
34 # raise NotImplementedError()
35 # return self.__dict__ == other.__dict__
37 def _unpack(self, data):
38 obj, (data2, pos) = self.read((data, 0))
48 f = self.write(None, obj)
58 def unpack(self, data):
59 obj = self._unpack(data)
62 data2 = self._pack(obj)
64 if self._unpack(data2) != obj:
65 raise AssertionError()
70 data = self._pack(obj)
73 if self._unpack(data) != obj:
74 raise AssertionError()
    def pack_base58(self, obj):
        # Pack `obj` to its binary form, then render it in Base58 text.
        return base58.base58_encode(self.pack(obj))
    def unpack_base58(self, base58_data):
        # Inverse of pack_base58: decode the Base58 text, then unpack the bytes.
        return self.unpack(base58.base58_decode(base58_data))
    def hash160(self, obj):
        # RIPEMD160(SHA256(packed bytes)), returned as a 160-bit integer.
        return ShortHashType().unpack(hashlib.new('ripemd160', hashlib.sha256(self.pack(obj)).digest()).digest())
    def hash256(self, obj):
        # Double SHA256 of the packed bytes, returned as a 256-bit integer.
        return HashType().unpack(hashlib.sha256(hashlib.sha256(self.pack(obj)).digest()).digest())
92 class VarIntType(Type):
93 # redundancy doesn't matter here because bitcoin and p2pool both reencode before hashing
95 data, file = read(file, 1)
100 desc, length = '<H', 2
102 desc, length = '<I', 4
104 desc, length = '<Q', 8
106 raise AssertionError()
107 data, file = read(file, length)
108 return struct.unpack(desc, data)[0], file
110 def write(self, file, item):
112 file = file, struct.pack('<B', item)
114 file = file, struct.pack('<BH', 0xfd, item)
115 elif item <= 0xffffffff:
116 file = file, struct.pack('<BI', 0xfe, item)
117 elif item <= 0xffffffffffffffff:
118 file = file, struct.pack('<BQ', 0xff, item)
120 raise ValueError('int too large for varint')
class VarStrType(Type):
    """A byte string serialized as a varint length prefix followed by the raw bytes."""

    _inner_size = VarIntType()

    def read(self, file):
        # Decode the length prefix, then consume exactly that many bytes.
        size, file = self._inner_size.read(file)
        return read(file, size)

    def write(self, file, item):
        # Emit the length prefix first, then the payload bytes unchanged.
        prefixed = self._inner_size.write(file, len(item))
        return prefixed, item
133 class FixedStrType(Type):
134 def __init__(self, length):
137 def read(self, file):
138 return read(file, self.length)
140 def write(self, file, item):
141 if len(item) != self.length:
142 raise ValueError('incorrect length item!')
145 class EnumType(Type):
146 def __init__(self, inner, values):
151 for k, v in values.iteritems():
153 raise ValueError('duplicate value in values')
156 def read(self, file):
157 data, file = self.inner.read(file)
158 if data not in self.keys:
159 raise ValueError('enum data (%r) not in values (%r)' % (data, self.values))
160 return self.keys[data], file
162 def write(self, file, item):
163 if item not in self.values:
164 raise ValueError('enum item (%r) not in values (%r)' % (item, self.values))
165 return self.inner.write(file, self.values[item])
167 class HashType(Type):
168 def read(self, file):
169 data, file = read(file, 256//8)
170 return int(data[::-1].encode('hex'), 16), file
172 def write(self, file, item):
173 if not 0 <= item < 2**256:
174 raise ValueError('invalid hash value - %r' % (item,))
175 if item != 0 and item < 2**160:
176 print 'Very low hash value - maybe you meant to use ShortHashType? %x' % (item,)
177 return file, ('%064x' % (item,)).decode('hex')[::-1]
class ShortHashType(Type):
    """A 160-bit hash: 20 little-endian bytes on the wire, an integer in memory."""

    def read(self, file):
        # Reverse the little-endian wire bytes before hex-decoding to an int.
        raw, file = read(file, 160//8)
        return int(raw[::-1].encode('hex'), 16), file

    def write(self, file, item):
        if not 0 <= item < 2**160:
            raise ValueError('invalid hash value - %r' % (item,))
        encoded = ('%040x' % (item,)).decode('hex')[::-1]
        return file, encoded
189 class ListType(Type):
190 _inner_size = VarIntType()
192 def __init__(self, type):
195 def read(self, file):
196 length, file = self._inner_size.read(file)
198 for i in xrange(length):
199 item, file = self.type.read(file)
203 def write(self, file, item):
204 file = self._inner_size.write(file, len(item))
206 file = self.type.write(file, subitem)
209 class StructType(Type):
210 def __init__(self, desc):
212 self.length = struct.calcsize(self.desc)
214 def read(self, file):
215 data, file = read(file, self.length)
216 res, = struct.unpack(self.desc, data)
219 def write(self, file, item):
220 data = struct.pack(self.desc, item)
221 if struct.unpack(self.desc, data)[0] != item:
222 # special test because struct doesn't error on some overflows
223 raise ValueError('''item didn't survive pack cycle (%r)''' % (item,))
226 class IPV6AddressType(Type):
227 def read(self, file):
228 data, file = read(file, 16)
229 if data[:12] != '00000000000000000000ffff'.decode('hex'):
230 raise ValueError('ipv6 addresses not supported yet')
231 return '.'.join(str(ord(x)) for x in data[12:]), file
233 def write(self, file, item):
234 bits = map(int, item.split('.'))
236 raise ValueError('invalid address: %r' % (bits,))
237 data = '00000000000000000000ffff'.decode('hex') + ''.join(chr(x) for x in bits)
238 assert len(data) == 16, len(data)
243 def get_record(fields):
244 fields = tuple(sorted(fields))
247 if fields not in _record_types:
248 class _Record(object):
250 def __getitem__(self, key):
251 return getattr(self, key)
252 def __setitem__(self, key, value):
253 setattr(self, key, value)
255 # for field in self.__slots__:
256 # yield field, getattr(self, field)
258 return self.__slots__
259 def __eq__(self, other):
260 if isinstance(other, dict):
261 return dict(self) == other
262 elif isinstance(other, _Record):
263 return all(self[k] == other[k] for k in self.keys())
265 def __ne__(self, other):
266 return not (self == other)
267 _record_types[fields] = _Record
268 return _record_types[fields]()
270 class ComposedType(Type):
271 def __init__(self, fields):
274 def read(self, file):
275 item = get_record(k for k, v in self.fields)
276 for key, type_ in self.fields:
277 item[key], file = type_.read(file)
280 def write(self, file, item):
281 for key, type_ in self.fields:
282 file = type_.write(file, item[key])
285 class ChecksummedType(Type):
286 def __init__(self, inner):
289 def read(self, file):
290 obj, file = self.inner.read(file)
291 data = self.inner.pack(obj)
293 checksum, file = read(file, 4)
294 if checksum != hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]:
295 raise ValueError('invalid checksum')
299 def write(self, file, item):
300 data = self.inner.pack(item)
301 return (file, data), hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]
303 class FloatingInteger(object):
304 __slots__ = ['_bits']
307 def from_target_upper_bound(cls, target):
308 n = bases.natural_to_string(target)
309 if n and ord(n[0]) >= 128:
311 bits2 = (chr(len(n)) + (n + 3*chr(0))[:3])[::-1]
312 bits = struct.unpack('<I', bits2)[0]
315 def __init__(self, bits):
320 return math.shift_left(self._bits & 0x00ffffff, 8 * ((self._bits >> 24) - 3))
323 return hash(self._value)
325 def __cmp__(self, other):
326 if isinstance(other, FloatingInteger):
327 return cmp(self._value, other._value)
328 elif isinstance(other, (int, long)):
329 return cmp(self._value, other)
331 raise NotImplementedError()
337 return 'FloatingInteger(bits=%s (%x))' % (hex(self._bits), self)
339 def __add__(self, other):
340 if isinstance(other, (int, long)):
341 return self._value + other
342 raise NotImplementedError()
344 def __mul__(self, other):
345 if isinstance(other, (int, long)):
346 return self._value * other
347 raise NotImplementedError()
349 def __truediv__(self, other):
350 if isinstance(other, (int, long)):
351 return self._value / other
352 raise NotImplementedError()
353 def __floordiv__(self, other):
354 if isinstance(other, (int, long)):
355 return self._value // other
356 raise NotImplementedError()
357 __div__ = __truediv__
358 def __rtruediv__(self, other):
359 if isinstance(other, (int, long)):
360 return other / self._value
361 raise NotImplementedError()
362 def __rfloordiv__(self, other):
363 if isinstance(other, (int, long)):
364 return other // self._value
365 raise NotImplementedError()
366 __rdiv__ = __rtruediv__
class FloatingIntegerType(Type):
    """Serializes a FloatingInteger as its raw 32-bit little-endian bits field."""

    _inner = StructType('<I')

    def read(self, file):
        raw_bits, file = self._inner.read(file)
        return FloatingInteger(raw_bits), file

    def write(self, file, item):
        # Only the compact bits representation is stored on the wire.
        return self._inner.write(file, item._bits)
378 class PossiblyNone(Type):
379 def __init__(self, none_value, inner):
380 self.none_value = none_value
383 def read(self, file):
384 value, file = self.inner.read(file)
385 return None if value == self.none_value else value, file
387 def write(self, file, item):
388 if item == self.none_value:
389 raise ValueError('none_value used')
390 return self.inner.write(file, self.none_value if item is None else item)
392 address_type = ComposedType([
393 ('services', StructType('<Q')),
394 ('address', IPV6AddressType()),
395 ('port', StructType('>H')),
398 tx_type = ComposedType([
399 ('version', StructType('<I')),
400 ('tx_ins', ListType(ComposedType([
401 ('previous_output', PossiblyNone(dict(hash=0, index=2**32 - 1), ComposedType([
402 ('hash', HashType()),
403 ('index', StructType('<I')),
405 ('script', VarStrType()),
406 ('sequence', PossiblyNone(2**32 - 1, StructType('<I'))),
408 ('tx_outs', ListType(ComposedType([
409 ('value', StructType('<Q')),
410 ('script', VarStrType()),
412 ('lock_time', StructType('<I')),
415 block_header_type = ComposedType([
416 ('version', StructType('<I')),
417 ('previous_block', PossiblyNone(0, HashType())),
418 ('merkle_root', HashType()),
419 ('timestamp', StructType('<I')),
420 ('target', FloatingIntegerType()),
421 ('nonce', StructType('<I')),
424 block_type = ComposedType([
425 ('header', block_header_type),
426 ('txs', ListType(tx_type)),
430 merkle_record_type = ComposedType([
431 ('left', HashType()),
432 ('right', HashType()),
def merkle_hash(tx_list):
    """Compute the merkle root of `tx_list` (bitcoin-style).

    Pairs of hashes are combined level by level; an odd trailing element is
    paired with itself (right is None -> reuse left). The loop terminates with
    exactly one hash, which is the root.
    """
    hash_list = map(tx_type.hash256, tx_list)
    while len(hash_list) > 1:
        hash_list = [merkle_record_type.hash256(dict(left=left, right=left if right is None else right))
            for left, right in zip(hash_list[::2], hash_list[1::2] + [None])]
    # Fix: the visible code computed the root but never returned it.
    return hash_list[0]
def target_to_average_attempts(target):
    """Expected number of hash attempts needed to find a hash <= `target`.

    Of the 2**256 equally likely hash values, target + 1 are acceptable.
    """
    acceptable_values = target + 1
    return 2**256 // acceptable_values
def tx_get_sigop_count(tx):
    """Total signature-operation count over all of `tx`'s input and output scripts."""
    input_sigops = sum(script.get_sigop_count(txin['script']) for txin in tx['tx_ins'])
    output_sigops = sum(script.get_sigop_count(txout['script']) for txout in tx['tx_outs'])
    return input_sigops + output_sigops
454 human_address_type = ChecksummedType(ComposedType([
455 ('version', StructType('<B')),
456 ('pubkey_hash', ShortHashType()),
pubkey_type = FixedStrType(65)  # 65-byte raw public key (presumably uncompressed SEC encoding — confirm)
def pubkey_hash_to_address(pubkey_hash, net):
    # Checksummed Base58 address: version byte from `net` + 160-bit pubkey hash.
    return human_address_type.pack_base58(dict(version=net.BITCOIN_ADDRESS_VERSION, pubkey_hash=pubkey_hash))
def pubkey_to_address(pubkey, net):
    # The address is derived from HASH160 of the serialized public key.
    return pubkey_hash_to_address(pubkey_type.hash160(pubkey), net)
def address_to_pubkey_hash(address, net):
    """Decode `address` and return its embedded pubkey hash.

    Raises ValueError when the version byte does not match `net`.
    """
    decoded = human_address_type.unpack_base58(address)
    if decoded['version'] != net.BITCOIN_ADDRESS_VERSION:
        raise ValueError('address not for this net!')
    return decoded['pubkey_hash']
def pubkey_to_script2(pubkey):
    # Standard pay-to-pubkey output script:
    # 0x41 = push 65 bytes, then the key, then 0xac = OP_CHECKSIG.
    return ('\x41' + pubkey_type.pack(pubkey)) + '\xac'
def pubkey_hash_to_script2(pubkey_hash):
    # Standard pay-to-pubkey-hash output script:
    # OP_DUP OP_HASH160 <push 20 bytes> <hash> OP_EQUALVERIFY OP_CHECKSIG.
    return '\x76\xa9' + ('\x14' + ShortHashType().pack(pubkey_hash)) + '\x88\xac'
481 def script2_to_human(script2, net):
483 pubkey = script2[1:-1]
484 script2_test = pubkey_to_script2(pubkey)
488 if script2_test == script2:
489 return 'Pubkey. Address: %s' % (pubkey_to_address(pubkey, net),)
492 pubkey_hash = ShortHashType().unpack(script2[3:-2])
493 script2_test2 = pubkey_hash_to_script2(pubkey_hash)
497 if script2_test2 == script2:
498 return 'Address. Address: %s' % (pubkey_hash_to_address(pubkey_hash, net),)
500 return 'Unknown. Script: %s' % (script2.encode('hex'),)
502 # linked list tracker
504 class Tracker(object):
506 self.shares = {} # hash -> share
507 #self.ids = {} # hash -> (id, height)
508 self.reverse_shares = {} # previous_hash -> set of share_hashes
510 self.heads = {} # head hash -> tail_hash
511 self.tails = {} # tail hash -> set of head hashes
513 self.heights = {} # share_hash -> height_to, ref, work_inc
514 self.reverse_heights = {} # ref -> set of share_hashes
516 self.ref_generator = itertools.count()
517 self.height_refs = {} # ref -> height, share_hash, work_inc
518 self.reverse_height_refs = {} # share_hash -> ref
520 self.get_nth_parent_hash = skiplists.DistanceSkipList(self)
522 self.added = variable.Event()
523 self.removed = variable.Event()
525 def add(self, share):
526 assert not isinstance(share, (int, long, type(None)))
527 if share.hash in self.shares:
528 raise ValueError('share already present')
530 if share.hash in self.tails:
531 heads = self.tails.pop(share.hash)
533 heads = set([share.hash])
535 if share.previous_hash in self.heads:
536 tail = self.heads.pop(share.previous_hash)
538 tail = self.get_last(share.previous_hash)
539 #tail2 = share.previous_hash
540 #while tail2 in self.shares:
541 # tail2 = self.shares[tail2].previous_hash
542 #assert tail == tail2
544 self.shares[share.hash] = share
545 self.reverse_shares.setdefault(share.previous_hash, set()).add(share.hash)
547 self.tails.setdefault(tail, set()).update(heads)
548 if share.previous_hash in self.tails[tail]:
549 self.tails[tail].remove(share.previous_hash)
552 self.heads[head] = tail
554 self.added.happened(share)
558 for s in self.shares.itervalues():
561 assert self.shares == t.shares, (self.shares, t.shares)
562 assert self.reverse_shares == t.reverse_shares, (self.reverse_shares, t.reverse_shares)
563 assert self.heads == t.heads, (self.heads, t.heads)
564 assert self.tails == t.tails, (self.tails, t.tails)
566 def remove(self, share_hash):
567 assert isinstance(share_hash, (int, long, type(None)))
568 if share_hash not in self.shares:
571 share = self.shares[share_hash]
574 children = self.reverse_shares.get(share.hash, set())
576 # move height refs referencing children down to this, so they can be moved up in one step
577 if share.previous_hash in self.reverse_height_refs:
578 for x in list(self.reverse_heights.get(self.reverse_height_refs.get(share.hash, object()), set())):
580 assert share.hash not in self.reverse_height_refs, list(self.reverse_heights.get(self.reverse_height_refs.get(share.hash, None), set()))
582 if share.hash in self.heads and share.previous_hash in self.tails:
583 tail = self.heads.pop(share.hash)
584 self.tails[tail].remove(share.hash)
585 if not self.tails[share.previous_hash]:
586 self.tails.pop(share.previous_hash)
587 elif share.hash in self.heads:
588 tail = self.heads.pop(share.hash)
589 self.tails[tail].remove(share.hash)
590 if self.reverse_shares[share.previous_hash] != set([share.hash]):
593 self.tails[tail].add(share.previous_hash)
594 self.heads[share.previous_hash] = tail
595 elif share.previous_hash in self.tails:
596 heads = self.tails[share.previous_hash]
597 if len(self.reverse_shares[share.previous_hash]) > 1:
598 raise NotImplementedError()
600 del self.tails[share.previous_hash]
602 self.heads[head] = share.hash
603 self.tails[share.hash] = set(heads)
605 raise NotImplementedError()
607 # move ref pointing to this up
608 if share.previous_hash in self.reverse_height_refs:
609 assert share.hash not in self.reverse_height_refs, list(self.reverse_heights.get(self.reverse_height_refs.get(share.hash, object()), set()))
611 ref = self.reverse_height_refs[share.previous_hash]
612 cur_height, cur_hash, cur_work = self.height_refs[ref]
613 assert cur_hash == share.previous_hash
614 self.height_refs[ref] = cur_height - 1, share.hash, cur_work - target_to_average_attempts(share.target)
615 del self.reverse_height_refs[share.previous_hash]
616 self.reverse_height_refs[share.hash] = ref
618 # delete height entry, and ref if it is empty
619 if share.hash in self.heights:
620 _, ref, _ = self.heights.pop(share.hash)
621 self.reverse_heights[ref].remove(share.hash)
622 if not self.reverse_heights[ref]:
623 del self.reverse_heights[ref]
624 _, ref_hash, _ = self.height_refs.pop(ref)
625 del self.reverse_height_refs[ref_hash]
627 self.shares.pop(share.hash)
628 self.reverse_shares[share.previous_hash].remove(share.hash)
629 if not self.reverse_shares[share.previous_hash]:
630 self.reverse_shares.pop(share.previous_hash)
632 #assert self.test() is None
633 self.removed.happened(share)
635 def get_height(self, share_hash):
636 height, work, last = self.get_height_work_and_last(share_hash)
639 def get_work(self, share_hash):
640 height, work, last = self.get_height_work_and_last(share_hash)
643 def get_last(self, share_hash):
644 height, work, last = self.get_height_work_and_last(share_hash)
647 def get_height_and_last(self, share_hash):
648 height, work, last = self.get_height_work_and_last(share_hash)
651 def _get_height_jump(self, share_hash):
652 if share_hash in self.heights:
653 height_to1, ref, work_inc1 = self.heights[share_hash]
654 height_to2, share_hash, work_inc2 = self.height_refs[ref]
655 height_inc = height_to1 + height_to2
656 work_inc = work_inc1 + work_inc2
658 height_inc, share_hash, work_inc = 1, self.shares[share_hash].previous_hash, target_to_average_attempts(self.shares[share_hash].target)
659 return height_inc, share_hash, work_inc
661 def _set_height_jump(self, share_hash, height_inc, other_share_hash, work_inc):
662 if other_share_hash not in self.reverse_height_refs:
663 ref = self.ref_generator.next()
664 assert ref not in self.height_refs
665 self.height_refs[ref] = 0, other_share_hash, 0
666 self.reverse_height_refs[other_share_hash] = ref
669 ref = self.reverse_height_refs[other_share_hash]
670 ref_height_to, ref_share_hash, ref_work_inc = self.height_refs[ref]
671 assert ref_share_hash == other_share_hash
673 if share_hash in self.heights:
674 prev_ref = self.heights[share_hash][1]
675 self.reverse_heights[prev_ref].remove(share_hash)
676 if not self.reverse_heights[prev_ref] and prev_ref != ref:
677 self.reverse_heights.pop(prev_ref)
678 _, x, _ = self.height_refs.pop(prev_ref)
679 self.reverse_height_refs.pop(x)
680 self.heights[share_hash] = height_inc - ref_height_to, ref, work_inc - ref_work_inc
681 self.reverse_heights.setdefault(ref, set()).add(share_hash)
683 def get_height_work_and_last(self, share_hash):
684 assert isinstance(share_hash, (int, long, type(None)))
689 while share_hash in self.shares:
690 updates.append((share_hash, height, work))
691 height_inc, share_hash, work_inc = self._get_height_jump(share_hash)
694 for update_hash, height_then, work_then in updates:
695 self._set_height_jump(update_hash, height - height_then, share_hash, work - work_then)
696 return height, work, share_hash
698 def get_chain_known(self, start_hash):
699 assert isinstance(start_hash, (int, long, type(None)))
701 Chain starting with item of hash I{start_hash} of items that this Tracker contains
703 item_hash_to_get = start_hash
705 if item_hash_to_get not in self.shares:
707 share = self.shares[item_hash_to_get]
708 assert not isinstance(share, long)
710 item_hash_to_get = share.previous_hash
712 def get_chain_to_root(self, start_hash, root=None):
713 assert isinstance(start_hash, (int, long, type(None)))
714 assert isinstance(root, (int, long, type(None)))
716 Chain of hashes starting with share_hash of shares to the root (doesn't include root)
717 Raises an error if one is missing
719 share_hash_to_get = start_hash
720 while share_hash_to_get != root:
721 share = self.shares[share_hash_to_get]
723 share_hash_to_get = share.previous_hash
725 def get_best_hash(self):
727 Returns hash of item with the most items in its chain
731 return max(self.heads, key=self.get_height_and_last)
    def get_highest_height(self):
        # Height of the tallest chain over all current heads; 0 when empty.
        return max(self.get_height_and_last(head)[0] for head in self.heads) if self.heads else 0
class FakeShare(object):
    """Test helper: an object whose attributes are exactly the keyword arguments given."""
    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)
740 if __name__ == '__main__':
744 for i in xrange(10000):
745 t.add(FakeShare(hash=i, previous_hash=i - 1 if i > 0 else None))
749 print 'HEADS', t.heads
750 print 'TAILS', t.tails
759 for i in xrange(random.randrange(100)):
760 x = random.choice(list(t.shares) + [None])
762 t.add(FakeShare(i, x))
764 x = random.choice(list(t.shares))
765 print 'DEL', x, t.__dict__
768 except NotImplementedError:
769 print 'aborted; not implemented'
772 print 'HEADS', t.heads
773 print 'TAILS', t.tails
775 #for share_hash, share in sorted(t.shares.iteritems()):
776 # print share_hash, share.previous_hash, t.heads.get(share_hash), t.tails.get(share_hash)
778 #import sys;sys.exit()
780 print t.get_nth_parent_hash(9000, 5000)
781 print t.get_nth_parent_hash(9001, 412)
782 #print t.get_nth_parent_hash(90, 51)
784 for share_hash in sorted(t.shares):
785 print str(share_hash).rjust(4),
786 x = t.skips.get(share_hash, None)
788 print str(x[0]).rjust(4),
790 print str(a).rjust(10),
793 # network definitions
class Mainnet(object):
    # Bitcoin mainnet network constants.
    BITCOIN_P2P_PREFIX = 'f9beb4d9'.decode('hex')  # magic bytes prefixing P2P messages
    BITCOIN_P2P_PORT = 8333
    BITCOIN_ADDRESS_VERSION = 0  # version byte used by human_address_type
class Testnet(object):
    # Bitcoin testnet network constants.
    BITCOIN_P2P_PREFIX = 'fabfb5da'.decode('hex')  # magic bytes prefixing P2P messages
    BITCOIN_P2P_PORT = 18333
    BITCOIN_ADDRESS_VERSION = 111  # version byte used by human_address_type