From 1b0c9e59da76fb818ba2f706f795545c5d45f612 Mon Sep 17 00:00:00 2001
From: Forrest Voight
Date: Sun, 28 Oct 2012 18:26:37 -0400
Subject: [PATCH] cache packed sizes of transactions so they don't have to be
 repacked often

---
 p2pool/data.py      |   18 +++++++++---------
 p2pool/p2p.py       |   16 ++++++++--------
 p2pool/util/pack.py |   23 +++++++++++++++++++----
 3 files changed, 36 insertions(+), 21 deletions(-)

diff --git a/p2pool/data.py b/p2pool/data.py
index 1d670f4..f770ad1 100644
--- a/p2pool/data.py
+++ b/p2pool/data.py
@@ -388,7 +388,7 @@ class NewShare(object):
                     break
             else:
                 if known_txs is not None:
-                    this_size = len(bitcoin_data.tx_type.pack(known_txs[tx_hash]))
+                    this_size = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
                     if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share
                         break
                     new_transaction_size += this_size
@@ -555,14 +555,14 @@ class NewShare(object):
         other_txs = self._get_other_txs(tracker, known_txs)
         if other_txs is None:
             return True, 'not all txs present'
-
-        all_txs_size = sum(len(bitcoin_data.tx_type.pack(tx)) for tx in other_txs)
-        if all_txs_size > 1000000:
-            return True, 'txs over block size limit'
-
-        new_txs_size = sum(len(bitcoin_data.tx_type.pack(known_txs[tx_hash])) for tx_hash in self.share_info['new_transaction_hashes'])
-        if new_txs_size > 50000:
-            return True, 'new txs over limit'
+        else:
+            all_txs_size = sum(bitcoin_data.tx_type.packed_size(tx) for tx in other_txs)
+            if all_txs_size > 1000000:
+                return True, 'txs over block size limit'
+
+            new_txs_size = sum(bitcoin_data.tx_type.packed_size(known_txs[tx_hash]) for tx_hash in self.share_info['new_transaction_hashes'])
+            if new_txs_size > 50000:
+                return True, 'new txs over limit'
 
         return False, None
 
diff --git a/p2pool/p2p.py b/p2pool/p2p.py
index 22a731f..9c1c5d0 100644
--- a/p2pool/p2p.py
+++ b/p2pool/p2p.py
@@ -182,16 +182,16 @@ class Protocol(p2protocol.Protocol):
             added = set(after) - set(before)
             removed = set(before) - set(after)
             if added:
-                self.remote_remembered_txs_size += sum(len(bitcoin_data.tx_type.pack(after[x])) for x in added)
+                self.remote_remembered_txs_size += sum(bitcoin_data.tx_type.packed_size(after[x]) for x in added)
                 assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
                 fragment(self.send_remember_tx, tx_hashes=[x for x in added if x in self.remote_tx_hashes], txs=[after[x] for x in added if x not in self.remote_tx_hashes])
             if removed:
                 self.send_forget_tx(tx_hashes=list(removed))
-                self.remote_remembered_txs_size -= sum(len(bitcoin_data.tx_type.pack(before[x])) for x in removed)
+                self.remote_remembered_txs_size -= sum(bitcoin_data.tx_type.packed_size(before[x]) for x in removed)
         watch_id2 = self.node.mining_txs_var.transitioned.watch(update_remote_view_of_my_mining_txs)
         self.connection_lost_event.watch(lambda: self.node.mining_txs_var.transitioned.unwatch(watch_id2))
 
-        self.remote_remembered_txs_size += sum(len(bitcoin_data.tx_type.pack(x)) for x in self.node.mining_txs_var.value.values())
+        self.remote_remembered_txs_size += sum(bitcoin_data.tx_type.packed_size(x) for x in self.node.mining_txs_var.value.values())
         assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
         fragment(self.send_remember_tx, tx_hashes=[], txs=self.node.mining_txs_var.value.values())
 
@@ -270,7 +270,7 @@ class Protocol(p2protocol.Protocol):
 
         hashes_to_send = [x for x in tx_hashes if x not in self.node.mining_txs_var.value and x in known_txs]
 
-        new_remote_remembered_txs_size = self.remote_remembered_txs_size + sum(len(bitcoin_data.tx_type.pack(known_txs[x])) for x in hashes_to_send)
+        new_remote_remembered_txs_size = self.remote_remembered_txs_size + sum(bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)
         if new_remote_remembered_txs_size > self.max_remembered_txs_size:
             raise ValueError('shares have too many txs')
         self.remote_remembered_txs_size = new_remote_remembered_txs_size
@@ -282,7 +282,7 @@ class Protocol(p2protocol.Protocol):
 
         if self.other_version >= 8:
             res = self.send_forget_tx(tx_hashes=hashes_to_send)
-            self.remote_remembered_txs_size -= sum(len(bitcoin_data.tx_type.pack(known_txs[x])) for x in hashes_to_send)
+            self.remote_remembered_txs_size -= sum(bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)
         return res
 
 
@@ -359,7 +359,7 @@ class Protocol(p2protocol.Protocol):
                 return
 
             self.remembered_txs[tx_hash] = tx
-            self.remembered_txs_size += len(bitcoin_data.tx_type.pack(tx))
+            self.remembered_txs_size += bitcoin_data.tx_type.packed_size(tx)
         new_known_txs = dict(self.node.known_txs_var.value)
         warned = False
         for tx in txs:
@@ -374,7 +374,7 @@ class Protocol(p2protocol.Protocol):
                 warned = True
 
             self.remembered_txs[tx_hash] = tx
-            self.remembered_txs_size += len(bitcoin_data.tx_type.pack(tx))
+            self.remembered_txs_size += bitcoin_data.tx_type.packed_size(tx)
             new_known_txs[tx_hash] = tx
         self.node.known_txs_var.set(new_known_txs)
         if self.remembered_txs_size >= self.max_remembered_txs_size:
@@ -384,7 +384,7 @@ class Protocol(p2protocol.Protocol):
     ])
     def handle_forget_tx(self, tx_hashes):
         for tx_hash in tx_hashes:
-            self.remembered_txs_size -= len(bitcoin_data.tx_type.pack(self.remembered_txs[tx_hash]))
+            self.remembered_txs_size -= bitcoin_data.tx_type.packed_size(self.remembered_txs[tx_hash])
             assert self.remembered_txs_size >= 0
             del self.remembered_txs[tx_hash]
 
diff --git a/p2pool/util/pack.py b/p2pool/util/pack.py
index d097aef..2762605 100644
--- a/p2pool/util/pack.py
+++ b/p2pool/util/pack.py
@@ -75,6 +75,19 @@ class Type(object):
                 raise AssertionError((self._unpack(data), obj))
 
         return data
+
+    def packed_size(self, obj):
+        if hasattr(obj, '_packed_size') and obj._packed_size is not None:
+            type_obj, packed_size = obj._packed_size
+            if type_obj is self:
+                return packed_size
+
+        packed_size = len(self.pack(obj))
+
+        if hasattr(obj, '_packed_size'):
+            obj._packed_size = self, packed_size
+
+        return packed_size
 
 class VarIntType(Type):
     def read(self, file):
@@ -228,11 +241,13 @@ _record_types = {}
 
 def get_record(fields):
     fields = tuple(sorted(fields))
-    if 'keys' in fields:
+    if 'keys' in fields or '_packed_size' in fields:
         raise ValueError()
     if fields not in _record_types:
         class _Record(object):
-            __slots__ = fields
+            __slots__ = fields + ('_packed_size',)
+            def __init__(self):
+                self._packed_size = None
             def __repr__(self):
                 return repr(dict(self))
             def __getitem__(self, key):
@@ -240,10 +255,10 @@ def get_record(fields):
             def __setitem__(self, key, value):
                 setattr(self, key, value)
             #def __iter__(self):
-            #    for field in self.__slots__:
+            #    for field in fields:
             #        yield field, getattr(self, field)
             def keys(self):
-                return self.__slots__
+                return fields
             def get(self, key, default=None):
                 return getattr(self, key, default)
             def __eq__(self, other):
-- 
1.7.1
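
Note on the caching scheme: the patch stores a (type, size) pair in a new `_packed_size` slot on the record objects produced by `pack.get_record`, and `Type.packed_size` trusts the cached size only if it was computed by the same `Type` instance, so two different serializers cannot hand each other stale lengths. That is also why `get_record` now rejects `_packed_size` as a field name: it would collide with the cache slot. The sketch below restates the pattern in isolation; it is a minimal illustration, not p2pool code, and the names `ToyType` and `Record` are invented stand-ins for `pack.Type` and the `get_record` record class.

```python
# Standalone sketch of the memoization pattern added by this patch.
# ToyType/Record are hypothetical stand-ins; the real code lives in
# p2pool/util/pack.py as Type.packed_size and the _packed_size slot.

class Record(object):
    # __slots__ keeps records small; the extra slot holds the cached (type, size) pair.
    __slots__ = ('value', '_packed_size')
    def __init__(self, value):
        self.value = value
        self._packed_size = None

class ToyType(object):
    def pack(self, obj):
        # stand-in for a real serializer; cost grows with the object
        return b'x' * obj.value

    def packed_size(self, obj):
        # reuse the cached size only if *this* type computed it, so a size cached
        # by a different serializer is never returned by mistake
        if getattr(obj, '_packed_size', None) is not None:
            type_obj, size = obj._packed_size
            if type_obj is self:
                return size
        size = len(self.pack(obj))
        if hasattr(obj, '_packed_size'):
            obj._packed_size = self, size  # memoize for later callers
        return size

if __name__ == '__main__':
    t = ToyType()
    r = Record(5)
    assert t.packed_size(r) == 5      # first call packs and caches
    assert r._packed_size == (t, 5)   # cache now holds (type, size)
    assert t.packed_size(r) == 5      # second call returns the cached size
```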