1 from __future__ import division
7 from twisted.internet import defer
9 from . import base58, skiplists
10 from p2pool.util import bases, math, variable, expiring_dict, memoize, dicts
class EarlyEnd(Exception):
    """Raised when packed data runs out before a full value could be read."""
class LateEnd(Exception):
    """Raised when leftover bytes remain after a value has been fully read."""
def read(file, length):
    """Consume `length` bytes from a (data, pos) cursor.

    Returns (bytes_read, new_cursor) where new_cursor is (data, pos + length).
    Raises EarlyEnd if fewer than `length` bytes remain.
    """
    # De-tupled parameter: `def read((data, pos), length)` is Python-2-only
    # syntax (removed by PEP 3113); callers still pass a single tuple.
    data, pos = file
    data2 = data[pos:pos + length]
    if len(data2) != length:
        # Restored short-read guard: the `if` had no body, and EarlyEnd exists
        # precisely for this condition.
        raise EarlyEnd()
    return data2, (data, pos + length)
def size(file):
    """Number of unread bytes remaining in a (data, pos) cursor.

    De-tupled parameter: `def size((data, pos))` is Python-2-only syntax
    (removed by PEP 3113); callers still pass a single tuple.
    """
    data, pos = file
    return len(data) - pos
29 # the same data can have only one unpacked representation, but multiple packed binary representations
32 rval = getattr(self, '_hash', None)
35 rval = self._hash = hash((type(self), frozenset(self.__dict__.items())))
41 def __eq__(self, other):
42 return type(other) is type(self) and other.__dict__ == self.__dict__
44 def __ne__(self, other):
45 return not (self == other)
47 def _unpack(self, data):
48 obj, (data2, pos) = self.read((data, 0))
58 f = self.write(None, obj)
    def unpack(self, data):
        # Parse `data` into an object; sanity-check that re-packing and
        # re-parsing reproduces the same object (round-trip consistency).
        # NOTE(review): some lines of this method appear truncated here.
        obj = self._unpack(data)
        data2 = self._pack(obj)
        if self._unpack(data2) != obj:
            raise AssertionError()
80 data = self._pack(obj)
83 if self._unpack(data) != obj:
84 raise AssertionError((self._unpack(data), obj))
88 _backing = expiring_dict.ExpiringDict(100)
89 pack2 = memoize.memoize_with_backing(_backing, [unpack])(pack2)
90 unpack = memoize.memoize_with_backing(_backing)(unpack) # doesn't have an inverse
93 return self.pack2(dicts.immutify(obj))
96 def pack_base58(self, obj):
97 return base58.base58_encode(self.pack(obj))
99 def unpack_base58(self, base58_data):
100 return self.unpack(base58.base58_decode(base58_data))
103 def hash160(self, obj):
104 return ShortHashType().unpack(hashlib.new('ripemd160', hashlib.sha256(self.pack(obj)).digest()).digest())
106 def hash256(self, obj):
107 return HashType().unpack(hashlib.sha256(hashlib.sha256(self.pack(obj)).digest()).digest())
109 def scrypt(self, obj):
111 return HashType().unpack(self.ltc_scrypt.getPoWHash(self.pack(obj)))
class VarIntType(Type):
    # Bitcoin CompactSize integer: value fits in 1 byte, or a 0xfd/0xfe/0xff
    # marker byte followed by a little-endian 2/4/8-byte integer.
    # redundancy doesn't matter here because bitcoin and p2pool both reencode before hashing
    def read(self, file):
        # First byte selects the width of the remainder.
        data, file = read(file, 1)
        # NOTE(review): the lines dispatching on the first byte appear
        # truncated here; only the struct descriptors survive.
        desc, length = '<H', 2
        desc, length = '<I', 4
        desc, length = '<Q', 8
        raise AssertionError()
        data, file = read(file, length)
        return struct.unpack(desc, data)[0], file

    def write(self, file, item):
        # Emits the smallest encoding that fits `item`; wider than 8 bytes
        # is an error. NOTE(review): the `if`/first `elif` guard lines appear
        # truncated here.
        file = file, struct.pack('<B', item)
        file = file, struct.pack('<BH', 0xfd, item)
        elif item <= 0xffffffff:
            file = file, struct.pack('<BI', 0xfe, item)
        elif item <= 0xffffffffffffffff:
            file = file, struct.pack('<BQ', 0xff, item)
        raise ValueError('int too large for varint')
class VarStrType(Type):
    """Variable-length byte string: a CompactSize length prefix followed by the raw bytes."""
    _inner_size = VarIntType()

    def read(self, file):
        # Read the length prefix, then exactly that many bytes.
        size, file = self._inner_size.read(file)
        return read(file, size)

    def write(self, file, item):
        # Emit the length prefix, then the payload itself.
        return self._inner_size.write(file, len(item)), item
class FixedStrType(Type):
    # Fixed-width byte string; the width is set at construction time.
    def __init__(self, length):
        # NOTE(review): the assignment of self.length appears truncated here;
        # read/write below rely on it.
    def read(self, file):
        return read(file, self.length)

    def write(self, file, item):
        # NOTE(review): the final `return` appears truncated here.
        if len(item) != self.length:
            raise ValueError('incorrect length item!')
class EnumType(Type):
    # Maps symbolic keys <-> packed wire values through an inner type;
    # self.values maps key -> wire value, self.keys is the reverse mapping.
    def __init__(self, inner, values):
        # NOTE(review): several lines of this constructor (storing `inner`,
        # building the reverse `keys` dict) appear truncated here.
        self.values = dicts.frozendict(values)
        for k, v in values.iteritems():
            raise ValueError('duplicate value in values')
        self.keys = dicts.frozendict(keys)

    def read(self, file):
        # Unpack the raw wire value, then translate it back to its key.
        data, file = self.inner.read(file)
        if data not in self.keys:
            raise ValueError('enum data (%r) not in values (%r)' % (data, self.values))
        return self.keys[data], file

    def write(self, file, item):
        # Translate the key to its wire value and pack that.
        if item not in self.values:
            raise ValueError('enum item (%r) not in values (%r)' % (item, self.values))
        return self.inner.write(file, self.values[item])
class HashType(Type):
    # 256-bit hash, little-endian on the wire, exposed as a python int.
    def read(self, file):
        data, file = read(file, 256//8)
        # Reverse to big-endian before parsing the hex digits as an int.
        return int(data[::-1].encode('hex'), 16), file

    def write(self, file, item):
        if not 0 <= item < 2**256:
            raise ValueError('invalid hash value - %r' % (item,))
        if item != 0 and item < 2**160:
            # A nonzero value that small would fit a 160-bit ShortHashType,
            # so it is probably the wrong type; warn but proceed.
            print 'Very low hash value - maybe you meant to use ShortHashType? %x' % (item,)
        # 64 hex digits -> 32 bytes, reversed back to little-endian.
        return file, ('%064x' % (item,)).decode('hex')[::-1]
class ShortHashType(Type):
    # 160-bit hash (e.g. RIPEMD160 output), little-endian on the wire,
    # exposed as a python int.
    def read(self, file):
        data, file = read(file, 160//8)
        # Reverse to big-endian before parsing the hex digits as an int.
        return int(data[::-1].encode('hex'), 16), file

    def write(self, file, item):
        if not 0 <= item < 2**160:
            raise ValueError('invalid hash value - %r' % (item,))
        # 40 hex digits -> 20 bytes, reversed back to little-endian.
        return file, ('%040x' % (item,)).decode('hex')[::-1]
class ListType(Type):
    # Homogeneous sequence prefixed with a CompactSize element count.
    _inner_size = VarIntType()

    def __init__(self, type):
        # NOTE(review): storing the element type on self appears truncated here.
    def read(self, file):
        # NOTE(review): result-list accumulation and the final return appear
        # truncated here.
        length, file = self._inner_size.read(file)
        for i in xrange(length):
            item, file = self.type.read(file)

    def write(self, file, item):
        # NOTE(review): the loop over subitems and the final return appear
        # truncated here.
        file = self._inner_size.write(file, len(item))
        file = self.type.write(file, subitem)
class StructType(Type):
    # Thin wrapper around a single struct format string (e.g. '<I').
    def __init__(self, desc):
        # NOTE(review): the assignment of self.desc appears truncated here.
        self.length = struct.calcsize(self.desc)

    def read(self, file):
        # NOTE(review): the final return appears truncated here.
        data, file = read(file, self.length)
        res, = struct.unpack(self.desc, data)

    def write(self, file, item):
        # NOTE(review): the final return appears truncated here.
        data = struct.pack(self.desc, item)
        if struct.unpack(self.desc, data)[0] != item:
            # special test because struct doesn't error on some overflows
            raise ValueError('''item didn't survive pack cycle (%r)''' % (item,))
class IPV6AddressType(Type):
    # 16-byte wire address; only IPv4-mapped addresses (::ffff:a.b.c.d)
    # are handled, exposed as a dotted-quad string.
    def read(self, file):
        data, file = read(file, 16)
        if data[:12] != '00000000000000000000ffff'.decode('hex'):
            raise ValueError('ipv6 addresses not supported yet')
        return '.'.join(str(ord(x)) for x in data[12:]), file

    def write(self, file, item):
        # NOTE(review): the validity check guarding this raise, and the final
        # return, appear truncated here.
        bits = map(int, item.split('.'))
            raise ValueError('invalid address: %r' % (bits,))
        data = '00000000000000000000ffff'.decode('hex') + ''.join(chr(x) for x in bits)
        assert len(data) == 16, len(data)
def get_record(fields):
    # Return a fresh instance of a record class keyed by the sorted field
    # names; classes are cached in _record_types so equal field sets share one.
    fields = tuple(sorted(fields))
    if fields not in _record_types:
        class _Record(object):
            # NOTE(review): __slots__ and several method `def` lines
            # (__repr__, __iter__, keys) appear truncated in this class body.
            return repr(dict(self))
            def __getitem__(self, key):
                return getattr(self, key)
            def __setitem__(self, key, value):
                setattr(self, key, value)
            # for field in self.__slots__:
            #     yield field, getattr(self, field)
            return self.__slots__
            def __eq__(self, other):
                # Compares field-wise against dicts and other records.
                if isinstance(other, dict):
                    return dict(self) == other
                elif isinstance(other, _Record):
                    return all(self[k] == other[k] for k in self.keys())
            def __ne__(self, other):
                return not (self == other)
        _record_types[fields] = _Record
    return _record_types[fields]()
class ComposedType(Type):
    # Ordered sequence of (name, type) fields packed back to back; values
    # travel as record objects from get_record.
    def __init__(self, fields):
        self.fields = tuple(fields)

    def read(self, file):
        # NOTE(review): the final return appears truncated here.
        item = get_record(k for k, v in self.fields)
        for key, type_ in self.fields:
            item[key], file = type_.read(file)

    def write(self, file, item):
        # NOTE(review): the final return appears truncated here.
        for key, type_ in self.fields:
            file = type_.write(file, item[key])
class ChecksummedType(Type):
    # Wraps an inner type and appends a 4-byte double-SHA256 checksum of the
    # packed payload (base58check style).
    def __init__(self, inner):
        # NOTE(review): the assignment of self.inner appears truncated here.
    def read(self, file):
        # NOTE(review): the final return appears truncated here.
        obj, file = self.inner.read(file)
        data = self.inner.pack(obj)

        checksum, file = read(file, 4)
        if checksum != hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]:
            raise ValueError('invalid checksum')

    def write(self, file, item):
        data = self.inner.pack(item)
        return (file, data), hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]
class FloatingInteger(object):
    # Bitcoin "compact bits" difficulty target: a 32-bit word holding a
    # 1-byte exponent and 3-byte mantissa, compared/operated on via its
    # expanded integer value.
    __slots__ = ['_bits']

    # NOTE(review): a decorator line (presumably @classmethod) appears
    # truncated above this alternate constructor.
    def from_target_upper_bound(cls, target):
        # NOTE(review): parts of this method (mantissa-sign adjustment and the
        # final return) appear truncated here.
        n = bases.natural_to_string(target)
        if n and ord(n[0]) >= 128:
        bits2 = (chr(len(n)) + (n + 3*chr(0))[:3])[::-1]
        bits = struct.unpack('<I', bits2)[0]

    def __init__(self, bits):
        # NOTE(review): the body (storing bits) appears truncated here; the two
        # orphaned returns below belong to truncated property definitions
        # (expanded value, and __hash__).
        return math.shift_left(self._bits & 0x00ffffff, 8 * ((self._bits >> 24) - 3))
        return hash(self._value)

    def __cmp__(self, other):
        # Compare by expanded value against other FloatingIntegers and ints.
        # NOTE(review): an `else:` line appears truncated before the raise;
        # the orphaned return below belongs to a truncated __repr__.
        if isinstance(other, FloatingInteger):
            return cmp(self._value, other._value)
        elif isinstance(other, (int, long)):
            return cmp(self._value, other)
        raise NotImplementedError()
        return 'FloatingInteger(bits=%s (%x))' % (hex(self._bits), self)

    # Arithmetic delegates to the expanded integer value; only plain-int
    # operands are supported.
    def __add__(self, other):
        if isinstance(other, (int, long)):
            return self._value + other
        raise NotImplementedError()

    def __mul__(self, other):
        if isinstance(other, (int, long)):
            return self._value * other
        raise NotImplementedError()

    def __truediv__(self, other):
        if isinstance(other, (int, long)):
            return self._value / other
        raise NotImplementedError()
    def __floordiv__(self, other):
        if isinstance(other, (int, long)):
            return self._value // other
        raise NotImplementedError()
    # Python 2 classic-division alias.
    __div__ = __truediv__
    def __rtruediv__(self, other):
        if isinstance(other, (int, long)):
            return other / self._value
        raise NotImplementedError()
    def __rfloordiv__(self, other):
        if isinstance(other, (int, long)):
            return other // self._value
        raise NotImplementedError()
    __rdiv__ = __rtruediv__
class FloatingIntegerType(Type):
    """Serializes a FloatingInteger as its raw 32-bit little-endian bits word."""
    _inner = StructType('<I')

    def read(self, file):
        # Decode the raw compact-bits word and wrap it.
        raw_bits, file = self._inner.read(file)
        return FloatingInteger(raw_bits), file

    def write(self, file, item):
        # Emit the wrapped value's raw bits unchanged.
        return self._inner.write(file, item._bits)
class PossiblyNoneType(Type):
    # Wraps an inner type, mapping one sentinel wire value to python None.
    def __init__(self, none_value, inner):
        # Sentinel wire value that decodes to None.
        # NOTE(review): the assignment of self.inner appears truncated here.
        self.none_value = none_value
407 def read(self, file):
408 value, file = self.inner.read(file)
409 return None if value == self.none_value else value, file
411 def write(self, file, item):
412 if item == self.none_value:
413 raise ValueError('none_value used')
414 return self.inner.write(file, self.none_value if item is None else item)
# Bitcoin p2p wire-format type definitions.
# NOTE(review): closing brackets of several ComposedType/ListType literals
# appear truncated throughout this section.

# Network address as used in addr/version messages.
address_type = ComposedType([
    ('services', StructType('<Q')),
    ('address', IPV6AddressType()),
    ('port', StructType('>H')),

# Full transaction; a coinbase input has previous_output == None
# (wire sentinel: hash 0, index 0xffffffff) and sequence == None (0xffffffff).
tx_type = ComposedType([
    ('version', StructType('<I')),
    ('tx_ins', ListType(ComposedType([
        ('previous_output', PossiblyNoneType(dicts.frozendict(hash=0, index=2**32 - 1), ComposedType([
            ('hash', HashType()),
            ('index', StructType('<I')),
        ('script', VarStrType()),
        ('sequence', PossiblyNoneType(2**32 - 1, StructType('<I'))),
    ('tx_outs', ListType(ComposedType([
        ('value', StructType('<Q')),
        ('script', VarStrType()),
    ('lock_time', StructType('<I')),

# A merkle branch is just a list of sibling hashes, leaf to root.
merkle_branch_type = ListType(HashType())

merkle_tx_type = ComposedType([
    ('block_hash', HashType()),
    ('merkle_branch', merkle_branch_type),
    ('index', StructType('<i')),

# 80-byte block header; previous_block 0 (genesis) decodes as None.
block_header_type = ComposedType([
    ('version', StructType('<I')),
    ('previous_block', PossiblyNoneType(0, HashType())),
    ('merkle_root', HashType()),
    ('timestamp', StructType('<I')),
    ('target', FloatingIntegerType()),
    ('nonce', StructType('<I')),

block_type = ComposedType([
    ('header', block_header_type),
    ('txs', ListType(tx_type)),

# Merged-mining proof linking a parent-chain block to an aux chain.
aux_pow_type = ComposedType([
    ('merkle_tx', merkle_tx_type),
    ('merkle_branch', merkle_branch_type),
    ('index', StructType('<i')),
    ('parent_block_header', block_header_type),

# Interior node of a merkle tree: hash256 of (left, right) child hashes.
merkle_record_type = ComposedType([
    ('left', HashType()),
    ('right', HashType()),
def merkle_hash(tx_list):
    # Merkle root of a transaction list: repeatedly pair adjacent hashes and
    # double-SHA256 them; an odd trailing element is paired with itself.
    # NOTE(review): the empty-list guard and the final `return hash_list[0]`
    # appear truncated here.
    hash_list = map(tx_type.hash256, tx_list)
    while len(hash_list) > 1:
        hash_list = [merkle_record_type.hash256(dict(left=left, right=left if right is None else right))
            for left, right in zip(hash_list[::2], hash_list[1::2] + [None])]
def calculate_merkle_branch(txs, index):
    # Build the merkle branch (sibling hashes, leaf to root) proving that
    # txs[index] is in the tree. Each entry carries (hash, is_on_path, branch).
    # NOTE(review): several lines of this function appear truncated here.
    hash_list = [(tx_type.hash256(tx), i == index, []) for i, tx in enumerate(txs)]

    while len(hash_list) > 1:
            merkle_record_type.hash256(dict(left=left, right=right)),
            # Whichever side is on the path inherits its branch, extended with
            # the sibling hash and which side it sits on.
            (left_l if left_f else right_l) + [dict(side=1, hash=right) if left_f else dict(side=0, hash=left)],
            for (left, left_f, left_l), (right, right_f, right_l) in
                zip(hash_list[::2], hash_list[1::2] + [hash_list[::2][-1]])

    res = [x['hash'] for x in hash_list[0][2]]

    assert hash_list[0][1]
    # Self-checks: the branch reproduces the root and encodes the leaf index.
    assert check_merkle_branch(txs[index], index, res) == hash_list[0][0]
    assert index == sum(k*2**i for i, k in enumerate([1-x['side'] for x in hash_list[0][2]]))
def check_merkle_branch(tx, index, merkle_branch):
    """Recompute the merkle root from a transaction and its branch.

    At level i, the branch hash goes on the left when bit i of `index` is set
    (the tx was a right child at that level), otherwise on the right.
    """
    # Rewritten as an explicit fold: the original used `reduce` with a lambda
    # whose parameter was a tuple pattern, which is Python-2-only syntax
    # (removed by PEP 3113). Behavior is identical.
    current = tx_type.hash256(tx)
    for i, h in enumerate(merkle_branch):
        if 2**i & index:
            current = merkle_record_type.hash256(dict(left=h, right=current))
        else:
            current = merkle_record_type.hash256(dict(left=current, right=h))
    return current
def target_to_average_attempts(target):
    """Expected number of hash attempts to find a hash <= target.

    A uniformly random 256-bit hash lands at or below `target` with
    probability (target + 1) / 2**256, so the expectation is the reciprocal.
    """
    hash_space = 1 << 256
    return hash_space // (target + 1)
def target_to_difficulty(target):
    """Bitcoin-style difficulty: the difficulty-1 target divided by `target`.

    Uses true division, so the result is a float ratio.
    """
    # Difficulty-1 upper bound (compact bits 0x1d00ffff), plus one so the
    # ratio is expressed in attempt counts rather than raw targets.
    max_attempts = 0xffff0000 * 2**(256-64) + 1
    return max_attempts / (target + 1)
def tx_get_sigop_count(tx):
    # Total signature-operation count over every input and output script.
    in_sigops = sum(script.get_sigop_count(txin['script']) for txin in tx['tx_ins'])
    out_sigops = sum(script.get_sigop_count(txout['script']) for txout in tx['tx_outs'])
    return in_sigops + out_sigops
# Base58check address payload: version byte + 160-bit pubkey hash.
# NOTE(review): closing brackets of this literal appear truncated.
human_address_type = ChecksummedType(ComposedType([
    ('version', StructType('<B')),
    ('pubkey_hash', ShortHashType()),

# Uncompressed SEC-format public key: 65 raw bytes.
pubkey_type = FixedStrType(65)
def pubkey_hash_to_address(pubkey_hash, net):
    # Render a pubkey hash as a base58check address using the net's version byte.
    payload = dict(version=net.BITCOIN_ADDRESS_VERSION, pubkey_hash=pubkey_hash)
    return human_address_type.pack_base58(payload)
def pubkey_to_address(pubkey, net):
    # HASH160 the raw public key, then render it as an address.
    hashed = pubkey_type.hash160(pubkey)
    return pubkey_hash_to_address(hashed, net)
def address_to_pubkey_hash(address, net):
    # Decode a base58check address and verify it carries this net's version byte.
    decoded = human_address_type.unpack_base58(address)
    if decoded['version'] != net.BITCOIN_ADDRESS_VERSION:
        raise ValueError('address not for this net!')
    return decoded['pubkey_hash']
def pubkey_to_script2(pubkey):
    # Standard pay-to-pubkey output script: PUSH65 <pubkey> OP_CHECKSIG.
    packed = pubkey_type.pack(pubkey)
    return '\x41' + packed + '\xac'
def pubkey_hash_to_script2(pubkey_hash):
    # Standard pay-to-pubkey-hash output script:
    # OP_DUP OP_HASH160 PUSH20 <hash> OP_EQUALVERIFY OP_CHECKSIG.
    packed = ShortHashType().pack(pubkey_hash)
    return '\x76\xa9' + '\x14' + packed + '\x88\xac'
def script2_to_human(script2, net):
    # Best-effort human-readable description of an output script: try
    # pay-to-pubkey, then pay-to-pubkey-hash, else report it as unknown.
    # NOTE(review): the try/except blocks that originally tolerated unpack
    # failures around each probe appear truncated here.
    pubkey = script2[1:-1]
    script2_test = pubkey_to_script2(pubkey)
    if script2_test == script2:
        return 'Pubkey. Address: %s' % (pubkey_to_address(pubkey, net),)

    pubkey_hash = ShortHashType().unpack(script2[3:-2])
    script2_test2 = pubkey_hash_to_script2(pubkey_hash)
    if script2_test2 == script2:
        return 'Address. Address: %s' % (pubkey_hash_to_address(pubkey_hash, net),)

    return 'Unknown. Script: %s' % (script2.encode('hex'),)
575 # linked list tracker
class Tracker(object):
    # Tracks a forest of shares linked by previous_hash, maintaining
    # head/tail chain indices plus memoized height/work accumulators.
    # NOTE(review): the `def __init__(self):` line appears truncated here.
        self.shares = {} # hash -> share
        #self.ids = {} # hash -> (id, height)
        self.reverse_shares = {} # previous_hash -> set of share_hashes

        self.heads = {} # head hash -> tail_hash
        self.tails = {} # tail hash -> set of head hashes

        self.heights = {} # share_hash -> height_to, ref, work_inc
        self.reverse_heights = {} # ref -> set of share_hashes

        self.ref_generator = itertools.count()
        self.height_refs = {} # ref -> height, share_hash, work_inc
        self.reverse_height_refs = {} # share_hash -> ref

        # O(log n) ancestor lookup built over this tracker.
        self.get_nth_parent_hash = skiplists.DistanceSkipList(self)

        # Fired with the share on every successful add/remove.
        self.added = variable.Event()
        self.removed = variable.Event()
    def add(self, share):
        # Insert a share, splicing any chains it connects and updating the
        # head/tail indices. NOTE(review): several lines (the `else:` branches
        # and the loop installing heads) appear truncated in this method.
        assert not isinstance(share, (int, long, type(None)))
        if share.hash in self.shares:
            raise ValueError('share already present')

        # If this share is the missing tail of existing chains, adopt their heads.
        if share.hash in self.tails:
            heads = self.tails.pop(share.hash)
            heads = set([share.hash])

        # Extending an existing head forwards, or locate the chain's true tail.
        if share.previous_hash in self.heads:
            tail = self.heads.pop(share.previous_hash)
            tail = self.get_last(share.previous_hash)
            #tail2 = share.previous_hash
            #while tail2 in self.shares:
            #    tail2 = self.shares[tail2].previous_hash
            #assert tail == tail2

        self.shares[share.hash] = share
        self.reverse_shares.setdefault(share.previous_hash, set()).add(share.hash)

        self.tails.setdefault(tail, set()).update(heads)
        if share.previous_hash in self.tails[tail]:
            self.tails[tail].remove(share.previous_hash)

            self.heads[head] = tail

        self.added.happened(share)
        # NOTE(review): this fragment belongs to a consistency-check method
        # whose `def` line appears truncated; it rebuilds a second Tracker `t`
        # from the stored shares and asserts every index matches.
        for s in self.shares.itervalues():

        assert self.shares == t.shares, (self.shares, t.shares)
        assert self.reverse_shares == t.reverse_shares, (self.reverse_shares, t.reverse_shares)
        assert self.heads == t.heads, (self.heads, t.heads)
        assert self.tails == t.tails, (self.tails, t.tails)
    def remove(self, share_hash):
        # Remove a share, patching head/tail indices and the memoized height
        # references. Only certain positions are supported; others raise
        # NotImplementedError. NOTE(review): several lines are truncated in
        # this method (early return, loop bodies, some else branches).
        assert isinstance(share_hash, (int, long, type(None)))
        if share_hash not in self.shares:
        share = self.shares[share_hash]

        children = self.reverse_shares.get(share.hash, set())

        # move height refs referencing children down to this, so they can be moved up in one step
        if share.previous_hash in self.reverse_height_refs:
            if share.previous_hash not in self.tails:
                for x in list(self.reverse_heights.get(self.reverse_height_refs.get(share.previous_hash, object()), set())):
            for x in list(self.reverse_heights.get(self.reverse_height_refs.get(share.hash, object()), set())):
            assert share.hash not in self.reverse_height_refs, list(self.reverse_heights.get(self.reverse_height_refs.get(share.hash, None), set()))

        if share.hash in self.heads and share.previous_hash in self.tails:
            # Share is a single-element chain: drop both its head and tail entries.
            tail = self.heads.pop(share.hash)
            self.tails[tail].remove(share.hash)
            if not self.tails[share.previous_hash]:
                self.tails.pop(share.previous_hash)
        elif share.hash in self.heads:
            # Share is the head of a longer chain: its parent becomes the head.
            tail = self.heads.pop(share.hash)
            self.tails[tail].remove(share.hash)
            if self.reverse_shares[share.previous_hash] != set([share.hash]):
                self.tails[tail].add(share.previous_hash)
                self.heads[share.previous_hash] = tail
        elif share.previous_hash in self.tails:
            # Share sits just above a tail: the chain's tail moves up to it.
            heads = self.tails[share.previous_hash]
            if len(self.reverse_shares[share.previous_hash]) > 1:
                raise NotImplementedError()

            del self.tails[share.previous_hash]
                self.heads[head] = share.hash
            self.tails[share.hash] = set(heads)
            raise NotImplementedError()

        # move ref pointing to this up
        if share.previous_hash in self.reverse_height_refs:
            assert share.hash not in self.reverse_height_refs, list(self.reverse_heights.get(self.reverse_height_refs.get(share.hash, object()), set()))

            ref = self.reverse_height_refs[share.previous_hash]
            cur_height, cur_hash, cur_work = self.height_refs[ref]
            assert cur_hash == share.previous_hash
            self.height_refs[ref] = cur_height - 1, share.hash, cur_work - target_to_average_attempts(share.target)
            del self.reverse_height_refs[share.previous_hash]
            self.reverse_height_refs[share.hash] = ref

        # delete height entry, and ref if it is empty
        if share.hash in self.heights:
            _, ref, _ = self.heights.pop(share.hash)
            self.reverse_heights[ref].remove(share.hash)
            if not self.reverse_heights[ref]:
                del self.reverse_heights[ref]
                _, ref_hash, _ = self.height_refs.pop(ref)
                del self.reverse_height_refs[ref_hash]

        self.shares.pop(share.hash)
        self.reverse_shares[share.previous_hash].remove(share.hash)
        if not self.reverse_shares[share.previous_hash]:
            self.reverse_shares.pop(share.previous_hash)

        #assert self.test() is None
        self.removed.happened(share)
    # Convenience projections of get_height_work_and_last.
    # NOTE(review): the `return` line of each of these four getters appears
    # truncated here.
    def get_height(self, share_hash):
        height, work, last = self.get_height_work_and_last(share_hash)

    def get_work(self, share_hash):
        height, work, last = self.get_height_work_and_last(share_hash)

    def get_last(self, share_hash):
        height, work, last = self.get_height_work_and_last(share_hash)

    def get_height_and_last(self, share_hash):
        height, work, last = self.get_height_work_and_last(share_hash)
    def _get_height_jump(self, share_hash):
        # One step toward the root: returns (height_inc, next_share_hash,
        # work_inc), following a memoized multi-step ref when one exists,
        # otherwise stepping to the immediate parent.
        # NOTE(review): the `else:` between the two branches appears truncated.
        if share_hash in self.heights:
            height_to1, ref, work_inc1 = self.heights[share_hash]
            height_to2, share_hash, work_inc2 = self.height_refs[ref]
            height_inc = height_to1 + height_to2
            work_inc = work_inc1 + work_inc2
            height_inc, share_hash, work_inc = 1, self.shares[share_hash].previous_hash, target_to_average_attempts(self.shares[share_hash].target)
        return height_inc, share_hash, work_inc
    def _set_height_jump(self, share_hash, height_inc, other_share_hash, work_inc):
        # Memoize that share_hash reaches other_share_hash after height_inc
        # steps and work_inc work, creating a ref for the target if needed.
        # NOTE(review): an `else:` around the ref-creation branch appears truncated.
        if other_share_hash not in self.reverse_height_refs:
            # First jump targeting this hash: allocate a fresh zero-offset ref.
            ref = self.ref_generator.next()
            assert ref not in self.height_refs
            self.height_refs[ref] = 0, other_share_hash, 0
            self.reverse_height_refs[other_share_hash] = ref
        ref = self.reverse_height_refs[other_share_hash]
        ref_height_to, ref_share_hash, ref_work_inc = self.height_refs[ref]
        assert ref_share_hash == other_share_hash

        if share_hash in self.heights:
            # Detach from the previously memoized ref, discarding it if unused.
            prev_ref = self.heights[share_hash][1]
            self.reverse_heights[prev_ref].remove(share_hash)
            if not self.reverse_heights[prev_ref] and prev_ref != ref:
                self.reverse_heights.pop(prev_ref)
                _, x, _ = self.height_refs.pop(prev_ref)
                self.reverse_height_refs.pop(x)
        # Stored increments are relative to the ref's own offsets.
        self.heights[share_hash] = height_inc - ref_height_to, ref, work_inc - ref_work_inc
        self.reverse_heights.setdefault(ref, set()).add(share_hash)
    def get_height_work_and_last(self, share_hash):
        # Walk toward the root via memoized jumps until leaving the tracked
        # set, then backfill a jump entry for every node visited so the next
        # query is O(1). Returns (height, work, hash_below_root).
        # NOTE(review): initialization of height/work/updates and the
        # accumulation lines appear truncated here.
        assert isinstance(share_hash, (int, long, type(None)))
        while share_hash in self.shares:
            updates.append((share_hash, height, work))
            height_inc, share_hash, work_inc = self._get_height_jump(share_hash)
        for update_hash, height_then, work_then in updates:
            self._set_height_jump(update_hash, height - height_then, share_hash, work - work_then)
        return height, work, share_hash
    def get_chain_known(self, start_hash):
        # Generator walking from start_hash toward the root, stopping at the
        # first hash this tracker does not contain.
        # NOTE(review): the docstring delimiters, the loop header, and the
        # yield appear truncated in this method.
        assert isinstance(start_hash, (int, long, type(None)))
        Chain starting with item of hash I{start_hash} of items that this Tracker contains
        item_hash_to_get = start_hash
            if item_hash_to_get not in self.shares:
            share = self.shares[item_hash_to_get]
            assert not isinstance(share, long)
            item_hash_to_get = share.previous_hash
    def get_chain_to_root(self, start_hash, root=None):
        # Generator walking from start_hash down to (but not including) root;
        # KeyError if any link along the way is missing.
        # NOTE(review): the docstring delimiters, the loop header, and the
        # yield appear truncated in this method.
        assert isinstance(start_hash, (int, long, type(None)))
        assert isinstance(root, (int, long, type(None)))
        Chain of hashes starting with share_hash of shares to the root (doesn't include root)
        Raises an error if one is missing
        share_hash_to_get = start_hash
        while share_hash_to_get != root:
            share = self.shares[share_hash_to_get]
            share_hash_to_get = share.previous_hash
    def get_best_hash(self):
        # Head whose chain ranks highest by (height, last).
        # NOTE(review): the docstring delimiters and the empty-heads guard
        # appear truncated in this method.
        Returns hash of item with the most items in its chain
        return max(self.heads, key=self.get_height_and_last)
809 def get_highest_height(self):
810 return max(self.get_height_and_last(head)[0] for head in self.heads) if self.heads else 0
812 def is_child_of(self, share_hash, possible_child_hash):
813 height, last = self.get_height_and_last(share_hash)
814 child_height, child_last = self.get_height_and_last(possible_child_hash)
815 if child_last != last:
816 return None # not connected, so can't be determined
817 height_up = child_height - height
818 return height_up >= 0 and self.get_nth_parent_hash(possible_child_hash, height_up) == share_hash
class FakeShare(object):
    """Minimal stand-in for a share: every keyword argument becomes an attribute."""
    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)
if __name__ == '__main__':
    # Ad-hoc stress test: build a 10000-share chain, then randomly add and
    # remove shares, printing the head/tail indices along the way.
    # NOTE(review): setup lines (imports, Tracker construction, the removal
    # loop's try header) appear truncated throughout this script.
    for i in xrange(10000):
        t.add(FakeShare(hash=i, previous_hash=i - 1 if i > 0 else None))

    print 'HEADS', t.heads
    print 'TAILS', t.tails

    for i in xrange(random.randrange(100)):
        x = random.choice(list(t.shares) + [None])
        t.add(FakeShare(i, x))

        x = random.choice(list(t.shares))
        print 'DEL', x, t.__dict__
    except NotImplementedError:
        print 'aborted; not implemented'

    print 'HEADS', t.heads
    print 'TAILS', t.tails

    #for share_hash, share in sorted(t.shares.iteritems()):
    #    print share_hash, share.previous_hash, t.heads.get(share_hash), t.tails.get(share_hash)

    #import sys;sys.exit()

    print t.get_nth_parent_hash(9000, 5000)
    print t.get_nth_parent_hash(9001, 412)
    #print t.get_nth_parent_hash(90, 51)

    for share_hash in sorted(t.shares):
        print str(share_hash).rjust(4),
        x = t.skips.get(share_hash, None)
        print str(x[0]).rjust(4),
        print str(a).rjust(10),