assert len(data) == 16, len(data)
return file, data
# Cache of generated record classes, keyed by their sorted field tuple,
# so repeated reads of the same composed type share one class.
_record_types = {}

def get_record(fields):
    """Return a fresh record instance whose slots are exactly `fields`.

    A record behaves like a lightweight fixed-key dict: item access maps to
    attribute access, and `keys()` + `__getitem__` make `dict(record)` work
    via the mapping protocol. Classes are cached per sorted field tuple.

    Raises ValueError if 'keys' is among the fields, since that name would
    shadow the keys() method the mapping protocol relies on.
    """
    fields = tuple(sorted(fields))
    if 'keys' in fields:
        raise ValueError("'keys' is a reserved field name")
    if fields not in _record_types:
        class _Record(object):
            __slots__ = fields
            def __repr__(self):
                return '_Record(%s)' % ', '.join(
                    '%s=%r' % (f, getattr(self, f))
                    for f in self.__slots__ if hasattr(self, f))
            def __getitem__(self, key):
                return getattr(self, key)
            def __setitem__(self, key, value):
                setattr(self, key, value)
            def keys(self):
                # Enables dict(record) through the mapping protocol.
                return self.__slots__
            def __eq__(self, other):
                if isinstance(other, dict):
                    return dict(self) == other
                elif isinstance(other, _Record):
                    return all(self[k] == other[k] for k in self.keys())
                # Records of *other* field sets are distinct classes and,
                # like unrelated types, are deliberately not comparable.
                raise TypeError('records compare only against dicts and same-shaped records')
            def __ne__(self, other):
                return not (self == other)
        _record_types[fields] = _Record
    return _record_types[fields]()
+
class ComposedType(Type):
    """Serialization type composed of an ordered list of (name, type) fields."""
    def __init__(self, fields):
        self.fields = fields
    def read(self, file):
        """Read every field in declaration order, returning (record, file)."""
        record = get_record(name for name, _ in self.fields)
        for name, field_type in self.fields:
            record[name], file = field_type.read(file)
        return record, file
class Tracker(object):
def __init__(self):
self.shares = {} # hash -> share
- self.ids = {} # hash -> (id, height)
+ #self.ids = {} # hash -> (id, height)
self.reverse_shares = {} # previous_hash -> set of share_hashes
self.heads = {} # head hash -> tail_hash
self.heights = {} # share_hash -> height_to, other_share_hash
+ '''
self.id_generator = itertools.count()
self.tails_by_id = {}
+ '''
self.get_nth_parent_hash = skiplist.DistanceSkipList(self)
if share.hash in self.shares:
return # XXX raise exception?
+ '''
parent_id = self.ids.get(share.previous_hash, None)
children_ids = set(self.ids.get(share2_hash) for share2_hash in self.reverse_shares.get(share.hash, set()))
infos = set()
infos.add((self.id_generator.next(), 0))
chosen = min(infos)
self.ids[share.hash] = chosen
+ '''
self.shares[share.hash] = share
self.reverse_shares.setdefault(share.previous_hash, set()).add(share.hash)
return self.conn.get_not_none()
class HeaderWrapper(object):
    # Lightweight share-like stand-in built from a bare block header.
    # `target` lives on the class as a -1 sentinel: __slots__ deliberately
    # defines no 'target' slot, so every instance reports the placeholder
    # instead of a real per-header target.
    target = -1
    __slots__ = 'hash previous_hash'.split(' ')

    def __init__(self, header):
        # header: decoded block-header mapping — assumed to match the fields
        # of bitcoin_data.block_header_type (TODO confirm at call sites).
        self.hash = bitcoin_data.block_header_type.hash256(header)
        self.previous_hash = header['previous_block']
class HeightTracker(object):
'''Point this at a factory and let it take care of getting block heights'''
)
class Share(object):
- peer = None
- highest_block_on_arrival = None
-
    @classmethod
    def from_block(cls, block):
        # Alternate constructor: split a full block into its header, the
        # share info recovered from the generation tx (block['txs'][0]),
        # and the remaining transactions as other_txs.
        return cls(block['header'], gentx_to_share_info(block['txs'][0]), other_txs=block['txs'][1:])
    def from_share1b(cls, share1b):
        # Alternate constructor: expand a share1b mapping directly into
        # __init__ keyword arguments.
        # NOTE(review): takes `cls` — presumably @classmethod-decorated just
        # above this view; confirm in the full file.
        return cls(**share1b)
+ __slots__ = 'header previous_block share_info merkle_branch other_txs timestamp share_data new_script subsidy previous_hash previous_share_hash target nonce bitcoin_hash hash time_seen shared peer'.split(' ')
+
def __init__(self, header, share_info, merkle_branch=None, other_txs=None):
if merkle_branch is None and other_txs is None:
raise ValueError('need either merkle_branch or other_txs')
if script.get_sigop_count(self.new_script) > 1:
raise ValueError('too many sigops!')
+ # XXX eww
self.time_seen = time.time()
self.shared = False
+ self.peer = None
def as_block(self, tracker, net):
if self.other_txs is None:
if len(bitcoin_data.block_type.pack(dict(header=self.header, txs=[gentx] + self.other_txs))) > 1000000 - 1000:
raise ValueError('''block size too large''')
-
- self.gentx = gentx
    def flag_shared(self):
        # Mark this share as shared (self.shared starts False in __init__).
        self.shared = True