From 6dc8cdbb9ed1e538e2205a0274d731aa608d3d0c Mon Sep 17 00:00:00 2001
From: Forrest Voight
Date: Sun, 18 Aug 2013 15:34:07 -0400
Subject: [PATCH] cleaned up ShareStore interface

---
 p2pool/data.py | 17 ++++++-----------
 p2pool/main.py | 20 +++++++-------------
 2 files changed, 13 insertions(+), 24 deletions(-)

diff --git a/p2pool/data.py b/p2pool/data.py
index 7c21fd8..aea8d8c 100644
--- a/p2pool/data.py
+++ b/p2pool/data.py
@@ -940,17 +940,11 @@ def format_hash(x):
     return '%08x' % (x % 2**32)
 
 class ShareStore(object):
-    def __init__(self, prefix, net):
-        self.filename = prefix
+    def __init__(self, prefix, net, share_cb, verified_hash_cb):
         self.dirname = os.path.dirname(os.path.abspath(prefix))
         self.filename = os.path.basename(os.path.abspath(prefix))
         self.net = net
-        self.known = None # will be filename -> set of share hashes, set of verified hashes
-        self.known_desired = None
-    
-    def get_shares(self):
-        if self.known is not None:
-            raise AssertionError()
+        
         known = {}
         filenames, next = self.get_filenames_and_next()
         for filename in filenames:
@@ -966,20 +960,21 @@ class ShareStore(object):
                             pass
                         elif type_id == 2:
                             verified_hash = int(data_hex, 16)
-                            yield 'verified_hash', verified_hash
+                            verified_hash_cb(verified_hash)
                             verified_hashes.add(verified_hash)
                         elif type_id == 5:
                             raw_share = share_type.unpack(data_hex.decode('hex'))
                             if raw_share['type'] in [0, 1, 2, 3, 4, 5, 6, 7, 8]:
                                 continue
                             share = load_share(raw_share, self.net, None)
-                            yield 'share', share
+                            share_cb(share)
                             share_hashes.add(share.hash)
                         else:
                             raise NotImplementedError("share type %i" % (type_id,))
                     except Exception:
                         log.err(None, "HARMLESS error while reading saved shares, continuing where left off:")
-        self.known = known
+        
+        self.known = known # filename -> (set of share hashes, set of verified hashes)
         self.known_desired = dict((k, (set(a), set(b))) for k, (a, b) in known.iteritems())
     
     def _add_line(self, line):
diff --git a/p2pool/main.py b/p2pool/main.py
index a9ae470..1dfc958 100644
--- a/p2pool/main.py
+++ b/p2pool/main.py
@@ -98,20 +98,15 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
         print
         
-        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
+        print "Loading shares..."
         shares = {}
         known_verified = set()
-        print "Loading shares..."
-        for i, (mode, contents) in enumerate(ss.get_shares()):
-            if mode == 'share':
-                contents.time_seen = 0
-                shares[contents.hash] = contents
-                if len(shares) % 1000 == 0 and shares:
-                    print "    %i" % (len(shares),)
-            elif mode == 'verified_hash':
-                known_verified.add(contents)
-            else:
-                raise AssertionError()
+        def share_cb(share):
+            share.time_seen = 0 # XXX
+            shares[share.hash] = share
+            if len(shares) % 1000 == 0 and shares:
+                print "    %i" % (len(shares),)
+        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net, share_cb, known_verified.add)
         print "    ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
         print
         
@@ -127,7 +122,6 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         for share_hash in known_verified:
             if share_hash not in node.tracker.verified.items:
                 ss.forget_verified_share(share_hash)
-        del shares, known_verified
         node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
         node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
         
-- 
1.7.1
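
For reference, a minimal sketch (not part of the patch) of how a caller drives the reworked constructor-with-callbacks interface. The 'shares.' prefix, share_cb, and known_verified.add mirror the main.py hunk above; the load_saved_shares helper and its signature are purely illustrative.

import os

from p2pool import data as p2pool_data

def load_saved_shares(datadir_path, net):
    # Illustrative helper: the constructor now scans the saved share files
    # itself and reports each stored record through the callbacks, replacing
    # the old pattern of constructing the store and then iterating
    # get_shares() afterwards.
    shares = {}            # share hash -> Share object
    known_verified = set() # hashes recorded on disk as verified

    def share_cb(share):
        share.time_seen = 0
        shares[share.hash] = share

    ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net,
        share_cb, known_verified.add)
    return ss, shares, known_verified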