break
else:
if known_txs is not None:
- this_size = len(bitcoin_data.tx_type.pack(known_txs[tx_hash]))
+ this_size = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share
break
new_transaction_size += this_size
other_txs = self._get_other_txs(tracker, known_txs)
if other_txs is None:
return True, 'not all txs present'
-
- all_txs_size = sum(len(bitcoin_data.tx_type.pack(tx)) for tx in other_txs)
- if all_txs_size > 1000000:
- return True, 'txs over block size limit'
-
- new_txs_size = sum(len(bitcoin_data.tx_type.pack(known_txs[tx_hash])) for tx_hash in self.share_info['new_transaction_hashes'])
- if new_txs_size > 50000:
- return True, 'new txs over limit'
+ else:
+ all_txs_size = sum(bitcoin_data.tx_type.packed_size(tx) for tx in other_txs)
+ if all_txs_size > 1000000:
+ return True, 'txs over block size limit'
+
+ new_txs_size = sum(bitcoin_data.tx_type.packed_size(known_txs[tx_hash]) for tx_hash in self.share_info['new_transaction_hashes'])
+ if new_txs_size > 50000:
+ return True, 'new txs over limit'
return False, None
added = set(after) - set(before)
removed = set(before) - set(after)
if added:
- self.remote_remembered_txs_size += sum(len(bitcoin_data.tx_type.pack(after[x])) for x in added)
+ self.remote_remembered_txs_size += sum(bitcoin_data.tx_type.packed_size(after[x]) for x in added)
assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
fragment(self.send_remember_tx, tx_hashes=[x for x in added if x in self.remote_tx_hashes], txs=[after[x] for x in added if x not in self.remote_tx_hashes])
if removed:
self.send_forget_tx(tx_hashes=list(removed))
- self.remote_remembered_txs_size -= sum(len(bitcoin_data.tx_type.pack(before[x])) for x in removed)
+ self.remote_remembered_txs_size -= sum(bitcoin_data.tx_type.packed_size(before[x]) for x in removed)
watch_id2 = self.node.mining_txs_var.transitioned.watch(update_remote_view_of_my_mining_txs)
self.connection_lost_event.watch(lambda: self.node.mining_txs_var.transitioned.unwatch(watch_id2))
- self.remote_remembered_txs_size += sum(len(bitcoin_data.tx_type.pack(x)) for x in self.node.mining_txs_var.value.values())
+ self.remote_remembered_txs_size += sum(bitcoin_data.tx_type.packed_size(x) for x in self.node.mining_txs_var.value.values())
assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
fragment(self.send_remember_tx, tx_hashes=[], txs=self.node.mining_txs_var.value.values())
hashes_to_send = [x for x in tx_hashes if x not in self.node.mining_txs_var.value and x in known_txs]
- new_remote_remembered_txs_size = self.remote_remembered_txs_size + sum(len(bitcoin_data.tx_type.pack(known_txs[x])) for x in hashes_to_send)
+ new_remote_remembered_txs_size = self.remote_remembered_txs_size + sum(bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)
if new_remote_remembered_txs_size > self.max_remembered_txs_size:
raise ValueError('shares have too many txs')
self.remote_remembered_txs_size = new_remote_remembered_txs_size
if self.other_version >= 8:
res = self.send_forget_tx(tx_hashes=hashes_to_send)
- self.remote_remembered_txs_size -= sum(len(bitcoin_data.tx_type.pack(known_txs[x])) for x in hashes_to_send)
+ self.remote_remembered_txs_size -= sum(bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)
return res
return
self.remembered_txs[tx_hash] = tx
- self.remembered_txs_size += len(bitcoin_data.tx_type.pack(tx))
+ self.remembered_txs_size += bitcoin_data.tx_type.packed_size(tx)
new_known_txs = dict(self.node.known_txs_var.value)
warned = False
for tx in txs:
warned = True
self.remembered_txs[tx_hash] = tx
- self.remembered_txs_size += len(bitcoin_data.tx_type.pack(tx))
+ self.remembered_txs_size += bitcoin_data.tx_type.packed_size(tx)
new_known_txs[tx_hash] = tx
self.node.known_txs_var.set(new_known_txs)
if self.remembered_txs_size >= self.max_remembered_txs_size:
])
def handle_forget_tx(self, tx_hashes):
    """Handle a peer's request to forget previously-remembered transactions.

    For each hash, subtracts the transaction's serialized size from the
    running total before deleting it, keeping remembered_txs_size consistent
    with the contents of remembered_txs.

    Note: the original hunk still contained both the old
    len(bitcoin_data.tx_type.pack(...)) line and its replacement as
    unresolved diff markers; resolved here to packed_size(), matching
    every other hunk in this patch.
    """
    for tx_hash in tx_hashes:
        self.remembered_txs_size -= bitcoin_data.tx_type.packed_size(self.remembered_txs[tx_hash])
        # Size accounting must never go negative; that would mean we were
        # asked to forget a tx we never accounted for.
        assert self.remembered_txs_size >= 0
        del self.remembered_txs[tx_hash]
raise AssertionError((self._unpack(data), obj))
return data
+
def packed_size(self, obj):
    """Return the serialized size in bytes of *obj* under this type.

    Equivalent to ``len(self.pack(obj))`` but memoized: the result is
    cached on ``obj._packed_size`` as a ``(type_obj, size)`` pair when the
    object exposes that attribute (record objects initialize it to None),
    so repeated size queries avoid re-serializing the transaction.

    Note: every line of this method carried a stray ``+`` diff marker in
    the source; stripped here so the method is valid Python.
    """
    # Fast path: reuse a size previously computed by this exact type object.
    if hasattr(obj, '_packed_size') and obj._packed_size is not None:
        type_obj, packed_size = obj._packed_size
        if type_obj is self:
            return packed_size

    packed_size = len(self.pack(obj))

    # Cache only on objects that support the slot (plain objects without
    # a _packed_size attribute are left untouched and simply recomputed).
    if hasattr(obj, '_packed_size'):
        obj._packed_size = self, packed_size

    return packed_size
class VarIntType(Type):
def read(self, file):
def get_record(fields):
fields = tuple(sorted(fields))
- if 'keys' in fields:
+ if 'keys' in fields or '_packed_size' in fields:
raise ValueError()
if fields not in _record_types:
class _Record(object):
- __slots__ = fields
+ __slots__ = fields + ('_packed_size',)
+ def __init__(self):
+ self._packed_size = None
def __repr__(self):
return repr(dict(self))
def __getitem__(self, key):
def __setitem__(self, key, value):
setattr(self, key, value)
#def __iter__(self):
- # for field in self.__slots__:
+ # for field in fields:
# yield field, getattr(self, field)
def keys(self):
- return self.__slots__
+ return fields
def get(self, key, default=None):
return getattr(self, key, default)
def __eq__(self, other):