X-Git-Url: https://git.novaco.in/?a=blobdiff_plain;f=p2pool%2Fwork.py;h=541234f70420ef7e8308799df9ad792f2d2c7683;hb=6da08c4e4a69cfd1fe9d55d2e019d878f26092a1;hp=dfdde9e50474dca85160f4c6148a4450b99b6d0d;hpb=1403fddc3cbce224d249ea9b330701997cb44e76;p=p2pool.git

diff --git a/p2pool/work.py b/p2pool/work.py
index dfdde9e..541234f 100644
--- a/p2pool/work.py
+++ b/p2pool/work.py
@@ -2,6 +2,7 @@ from __future__ import division
 
 import base64
 import random
+import re
 import sys
 import time
 
@@ -9,48 +10,53 @@ from twisted.internet import defer
 from twisted.python import log
 
 import bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
-from bitcoin import script, worker_interface
-from util import jsonrpc, variable, deferral, math, pack
+from bitcoin import helper, script, worker_interface
+from util import forest, jsonrpc, variable, deferral, math, pack
 import p2pool, p2pool.data as p2pool_data
 
 class WorkerBridge(worker_interface.WorkerBridge):
-    def __init__(self, my_pubkey_hash, net, donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var):
+    COINBASE_NONCE_LENGTH = 8
+
+    def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls, worker_fee):
         worker_interface.WorkerBridge.__init__(self)
         self.recent_shares_ts_work = []
 
+        self.node = node
         self.my_pubkey_hash = my_pubkey_hash
-        self.net = net
         self.donation_percentage = donation_percentage
-        self.bitcoind_work = bitcoind_work
-        self.best_block_header = best_block_header
-        self.best_share_var = best_share_var
-        self.tracker = tracker
-        self.my_share_hashes = my_share_hashes
-        self.my_doa_share_hashes = my_doa_share_hashes
         self.worker_fee = worker_fee
-        self.p2p_node = p2p_node
-        self.submit_block = submit_block
-        self.set_best_share = set_best_share
-        self.broadcast_share = broadcast_share
-        self.block_height_var = block_height_var
+        self.net = self.node.net.PARENT
+        self.running = True
 
         self.pseudoshare_received = variable.Event()
         self.share_received = variable.Event()
         self.local_rate_monitor = math.RateMonitor(10*60)
+        self.local_addr_rate_monitor = math.RateMonitor(10*60)
 
         self.removed_unstales_var = variable.Variable((0, 0, 0))
         self.removed_doa_unstales_var = variable.Variable(0)
-        @tracker.verified.removed.watch
+
+        self.my_share_hashes = set()
+        self.my_doa_share_hashes = set()
+
+        self.tracker_view = forest.TrackerView(self.node.tracker, forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
+            my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
+            my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
+            my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0,
+            my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0,
+        )))
+
+        @self.node.tracker.verified.removed.watch
         def _(share):
-            if share.hash in self.my_share_hashes and tracker.is_child_of(share.hash, self.best_share_var.value):
+            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                 assert share.share_data['stale_info'] in [None, 'orphan', 'doa'] # we made these shares in this instance
                 self.removed_unstales_var.set((
                     self.removed_unstales_var.value[0] + 1,
                     self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
                     self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
                 ))
-            if share.hash in self.my_doa_share_hashes and self.tracker.is_child_of(share.hash, self.best_share_var.value):
+            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                 self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)
 
         # MERGED WORK
 
@@ -59,10 +65,10 @@ class WorkerBridge(worker_interface.WorkerBridge):
 
         @defer.inlineCallbacks
         def set_merged_work(merged_url, merged_userpass):
-            merged_proxy = jsonrpc.Proxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
-            while True:
-                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 30)(merged_proxy.rpc_getauxblock)()
-                self.merged_work.set(dict(self.merged_work.value, **{auxblock['chainid']: dict(
+            merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
+            while self.running:
+                auxblock = yield deferral.retry('Error while calling merged getauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_getauxblock)()
+                self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict(
                     hash=int(auxblock['hash'], 16),
                     target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                     merged_proxy=merged_proxy,
@@ -79,27 +85,29 @@ class WorkerBridge(worker_interface.WorkerBridge):
 
         self.current_work = variable.Variable(None)
         def compute_work():
-            t = self.bitcoind_work.value
-            bb = self.best_block_header.value
-            if bb is not None and bb['previous_block'] == t['previous_block'] and net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
+            t = self.node.bitcoind_work.value
+            bb = self.node.best_block_header.value
+            if bb is not None and bb['previous_block'] == t['previous_block'] and bitcoin_data.scrypt(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
                 print 'Skipping from block %x to block %x!' % (bb['previous_block'], bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)))
                 t = dict(
                     version=bb['version'],
-                    previous_block=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)),
-                    bits=bb['bits'], # not always true
+                    previous_block=bitcoin_data.scrypt(bitcoin_data.block_header_type.pack(bb)),
+                    bits=self.node.pow_bits, # not always true
                     coinbaseflags='',
                     height=t['height'] + 1,
-                    time=bb['timestamp'] + 600, # better way?
+                    time=t['time'] + 30, # better way?
                     transactions=[],
+                    transaction_fees=[],
+                    txn_timestamp=0,
                     merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
-                    subsidy=net.PARENT.SUBSIDY_FUNC(self.block_height_var.value),
-                    last_update=self.bitcoind_work.value['last_update'],
+                    subsidy=self.node.pow_subsidy,
+                    last_update=self.node.bitcoind_work.value['last_update'],
                 )
 
             self.current_work.set(t)
-        self.bitcoind_work.changed.watch(lambda _: compute_work())
-        self.best_block_header.changed.watch(lambda _: compute_work())
+        self.node.bitcoind_work.changed.watch(lambda _: compute_work())
+        self.node.best_block_header.changed.watch(lambda _: compute_work())
         compute_work()
 
         self.new_work_event = variable.Event()
@@ -109,13 +117,16 @@ class WorkerBridge(worker_interface.WorkerBridge):
             if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
                 self.new_work_event.happened()
         self.merged_work.changed.watch(lambda _: self.new_work_event.happened())
-        self.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
+        self.node.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
+
+    def stop(self):
+        self.running = False
 
     def get_stale_counts(self):
         '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
         my_shares = len(self.my_share_hashes)
         my_doa_shares = len(self.my_doa_share_hashes)
-        delta = self.tracker.verified.get_delta_to_last(self.best_share_var.value)
+        delta = self.tracker_view.get_delta_to_last(self.node.best_share_var.value)
         my_shares_in_chain = delta.my_count + self.removed_unstales_var.value[0]
         my_doa_shares_in_chain = delta.my_doa_count + self.removed_doa_unstales_var.value
         orphans_recorded_in_chain = delta.my_orphan_announce_count + self.removed_unstales_var.value[1]
@@ -126,46 +137,73 @@ class WorkerBridge(worker_interface.WorkerBridge):
 
         return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
 
-    def get_user_details(self, request):
-        user = request.getUser() if request.getUser() is not None else ''
+    def get_user_details(self, username):
+        contents = re.split('([+/])', username)
+        assert len(contents) % 2 == 1
 
-        desired_pseudoshare_target = None
-        if '+' in user:
-            user, desired_pseudoshare_difficulty_str = user.rsplit('+', 1)
-            try:
-                desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(desired_pseudoshare_difficulty_str))
-            except:
-                pass
+        user, contents2 = contents[0], contents[1:]
 
-        desired_share_target = 2**256 - 1
-        if '/' in user:
-            user, min_diff_str = user.rsplit('/', 1)
-            try:
-                desired_share_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
-            except:
-                pass
+        desired_pseudoshare_target = None
+        desired_share_target = None
+        for symbol, parameter in zip(contents2[::2], contents2[1::2]):
+            if symbol == '+':
+                try:
+                    desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(parameter))
+                except:
+                    if p2pool.DEBUG:
+                        log.err()
+            elif symbol == '/':
+                try:
+                    desired_share_target = bitcoin_data.difficulty_to_target(float(parameter))
+                except:
+                    if p2pool.DEBUG:
+                        log.err()
 
         if random.uniform(0, 100) < self.worker_fee:
             pubkey_hash = self.my_pubkey_hash
         else:
            try:
-                pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.net.PARENT)
+                pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
            except: # XXX blah
                 pubkey_hash = self.my_pubkey_hash
 
         return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
 
-    def preprocess_request(self, request):
-        user, pubkey_hash, desired_share_target, desired_pseudoshare_target = self.get_user_details(request)
+    def preprocess_request(self, user):
+        if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST:
+            raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
+        if time.time() > self.current_work.value['last_update'] + 60:
+            raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
+        user, pubkey_hash, desired_share_target, desired_pseudoshare_target = self.get_user_details(user)
         return pubkey_hash, desired_share_target, desired_pseudoshare_target
 
+    def _estimate_local_hash_rate(self):
+        if len(self.recent_shares_ts_work) == 50:
+            hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
+            if hash_rate:
+                return hash_rate
+        return None
+
+    def get_local_rates(self):
+        miner_hash_rates = {}
+        miner_dead_hash_rates = {}
+        datums, dt = self.local_rate_monitor.get_datums_in_last()
+        for datum in datums:
+            miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
+            if datum['dead']:
+                miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
+        return miner_hash_rates, miner_dead_hash_rates
+
+    def get_local_addr_rates(self):
+        addr_hash_rates = {}
+        datums, dt = self.local_addr_rate_monitor.get_datums_in_last()
+        for datum in datums:
+            addr_hash_rates[datum['pubkey_hash']] = addr_hash_rates.get(datum['pubkey_hash'], 0) + datum['work']/dt
+        return addr_hash_rates
+
     def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
-        if len(self.p2p_node.peers) == 0 and self.net.PERSIST:
-            raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
-        if self.best_share_var.value is None and self.net.PERSIST:
+        if self.node.best_share_var.value is None and self.node.net.PERSIST:
             raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
-        if time.time() > self.current_work.value['last_update'] + 60:
-            raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
 
         if self.merged_work.value:
             tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
@@ -182,12 +220,55 @@ class WorkerBridge(worker_interface.WorkerBridge):
 
         tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
         tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
+        txn_timestamp = self.current_work.value['txn_timestamp']
+
+        previous_share = self.node.tracker.items[self.node.best_share_var.value] if self.node.best_share_var.value is not None else None
+        if previous_share is None:
+            share_type = p2pool_data.Share
+        else:
+            previous_share_type = type(previous_share)
+
+            if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH:
+                share_type = previous_share_type
+            else:
+                successor_type = previous_share_type.SUCCESSOR
+
+                counts = p2pool_data.get_desired_version_counts(self.node.tracker,
+                    self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10)
+                upgraded = counts.get(successor_type.VERSION, 0)/sum(counts.itervalues())
+                if upgraded > .65:
+                    print 'Switchover imminent. Upgraded: %.3f%% Threshold: %.3f%%' % (upgraded*100, 95)
+                print
+                # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
+                if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100:
+                    share_type = successor_type
+                else:
+                    share_type = previous_share_type
+
+        if desired_share_target is None:
+            desired_share_target = 2**256-1
+            local_hash_rate = self._estimate_local_hash_rate()
+            if local_hash_rate is not None:
+                desired_share_target = min(desired_share_target,
+                    bitcoin_data.average_attempts_to_target(local_hash_rate * self.node.net.SHARE_PERIOD / 0.0167)) # limit to 1.67% of pool shares by modulating share difficulty
+
+            local_addr_rates = self.get_local_addr_rates()
+            lookbehind = 3600//self.node.net.SHARE_PERIOD
+            block_subsidy = self.node.bitcoind_work.value['subsidy']
+            if previous_share is not None and self.node.tracker.get_height(previous_share.hash) > lookbehind:
+                expected_payout_per_block = local_addr_rates.get(pubkey_hash, 0)/p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, lookbehind) \
+                    * block_subsidy*(1-self.donation_percentage/100) # XXX doesn't use global stale rate to compute pool hash
+                if expected_payout_per_block < self.node.net.PARENT.DUST_THRESHOLD:
+                    desired_share_target = min(desired_share_target,
+                        bitcoin_data.average_attempts_to_target((bitcoin_data.target_to_average_attempts(self.node.bitcoind_work.value['bits'].target)*self.node.net.SPREAD)*self.node.net.PARENT.DUST_THRESHOLD/block_subsidy)
+                    )
 
         if True:
-            share_info, gentx, other_transaction_hashes, get_share = p2pool_data.Share.generate_transaction(
-                tracker=self.tracker,
+            desired_timestamp = int(time.time() + 0.5)
+            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
+                tracker=self.node.tracker,
                 share_data=dict(
-                    previous_share_hash=self.best_share_var.value,
+                    previous_share_hash=self.node.best_share_var.value,
                     coinbase=(script.create_push_script([
                         self.current_work.value['height'],
                     ] + ([mm_data] if mm_data else []) + [
@@ -201,72 +282,81 @@ class WorkerBridge(worker_interface.WorkerBridge):
                         'doa' if doas > doas_recorded_in_chain else
                         None
                     )(*self.get_stale_counts()),
-                    desired_version=5,
+                    desired_version=(share_type.SUCCESSOR if share_type.SUCCESSOR is not None else share_type).VOTING_VERSION,
                 ),
                 block_target=self.current_work.value['bits'].target,
-                desired_timestamp=int(time.time() + 0.5),
+                desired_timestamp=desired_timestamp if txn_timestamp < desired_timestamp else txn_timestamp + 1,
                 desired_target=desired_share_target,
                 ref_merkle_link=dict(branch=[], index=0),
-                other_transaction_hashes=tx_hashes,
-                net=self.net,
+                desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']),
+                net=self.node.net,
+                known_txs=tx_map,
+                base_subsidy=self.node.pow_subsidy,
+                #base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['height']),
            )
-        transactions = [gentx] + [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
+        packed_gentx = bitcoin_data.tx_type.pack(gentx)
+        other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
 
         mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
 
         if desired_pseudoshare_target is None:
             target = 2**256-1
-            if len(self.recent_shares_ts_work) == 50:
-                hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
-                if hash_rate:
-                    target = min(target, int(2**256/hash_rate))
+            local_hash_rate = self._estimate_local_hash_rate()
+            if local_hash_rate is not None:
+                target = min(target,
+                    bitcoin_data.average_attempts_to_target(local_hash_rate * 1)) # limit to 1 share response every second by modulating pseudoshare difficulty
         else:
             target = desired_pseudoshare_target
         target = max(target, share_info['bits'].target)
         for aux_work, index, hashes in mm_later:
             target = max(target, aux_work['target'])
-        target = math.clip(target, self.net.PARENT.SANE_TARGET_RANGE)
+        target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)
 
         getwork_time = time.time()
         lp_count = self.new_work_event.times
-        merkle_link = bitcoin_data.calculate_merkle_link([bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in transactions], 0)
+        merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0)
 
         print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
             bitcoin_data.target_to_difficulty(target),
             bitcoin_data.target_to_difficulty(share_info['bits'].target),
-            self.current_work.value['subsidy']*1e-8, self.net.PARENT.SYMBOL,
+            self.current_work.value['subsidy']*1e-6, self.node.net.PARENT.SYMBOL,
             len(self.current_work.value['transactions']),
         )
 
-        ba = bitcoin_getwork.BlockAttempt(
+        ba = dict(
             version=min(self.current_work.value['version'], 2),
             previous_block=self.current_work.value['previous_block'],
-            merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(bitcoin_data.tx_type.pack(transactions[0])), merkle_link),
-            timestamp=self.current_work.value['time'],
+            merkle_link=merkle_link,
+            coinb1=packed_gentx[:-self.COINBASE_NONCE_LENGTH-4],
+            coinb2=packed_gentx[-4:],
+            timestamp=self.current_work.value['time'] + 100,
             bits=self.current_work.value['bits'],
             share_target=target,
         )
 
         received_header_hashes = set()
 
-        def got_response(header, request):
-            header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
-            pow_hash = self.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
+        def got_response(header, user, coinbase_nonce):
+            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
+            new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx
+            new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx
+
+            header_hash = pow_hash = bitcoin_data.scrypt(bitcoin_data.block_header_type.pack(header))
             try:
                 if pow_hash <= header['bits'].target or p2pool.DEBUG:
-                    self.submit_block(dict(header=header, txs=transactions), ignore_failure=False)
+                    helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions, signature=''), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net)
                     if pow_hash <= header['bits'].target:
                         print
-                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
+                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                         print
             except:
                 log.err(None, 'Error while processing potential block:')
 
-            user, _, _, _ = self.get_user_details(request)
-            assert header['previous_block'] == ba.previous_block
-            assert header['merkle_root'] == ba.merkle_root
-            assert header['bits'] == ba.bits
+            user, _, _, _ = self.get_user_details(user)
+            assert header['previous_block'] == ba['previous_block']
+            assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link)
+            assert header['bits'] == ba['bits']
 
             on_time = self.new_work_event.times == lp_count
 
@@ -277,7 +367,7 @@ class WorkerBridge(worker_interface.WorkerBridge):
                             pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                             bitcoin_data.aux_pow_type.pack(dict(
                                 merkle_tx=dict(
-                                    tx=transactions[0],
+                                    tx=new_gentx,
                                     block_hash=header_hash,
                                     merkle_link=merkle_link,
                                 ),
@@ -298,10 +388,11 @@ class WorkerBridge(worker_interface.WorkerBridge):
                     log.err(None, 'Error while processing merged mining POW:')
 
             if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
-                share = get_share(header, transactions)
+                last_txout_nonce = pack.IntType(8*self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
+                share = get_share(header, last_txout_nonce)
 
                 print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
-                    request.getUser(),
+                    user,
                     p2pool_data.format_hash(share.hash),
                     p2pool_data.format_hash(share.previous_hash),
                     time.time() - getwork_time,
@@ -311,25 +402,23 @@ class WorkerBridge(worker_interface.WorkerBridge):
                 if not on_time:
                     self.my_doa_share_hashes.add(share.hash)
 
-                self.tracker.add(share)
-                if not p2pool.DEBUG:
-                    self.tracker.verified.add(share)
-                self.set_best_share()
+                self.node.tracker.add(share)
+                self.node.set_best_share()
 
                 try:
-                    if pow_hash <= header['bits'].target or p2pool.DEBUG:
-                        self.broadcast_share(share.hash)
+                    if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None:
+                        self.node.p2p_node.broadcast_share(share.hash)
                 except:
                     log.err(None, 'Error forwarding block solution:')
 
-                self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time)
+                self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash)
 
             if pow_hash > target:
-                print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
+                print 'Worker %s submitted share with hash > target:' % (user,)
                 print '    Hash:   %56x' % (pow_hash,)
                 print '    Target: %56x' % (target,)
             elif header_hash in received_header_hashes:
-                print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
+                print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,)
            else:
                 received_header_hashes.add(header_hash)
 
@@ -337,7 +426,8 @@ class WorkerBridge(worker_interface.WorkerBridge):
                 self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                 while len(self.recent_shares_ts_work) > 50:
                     self.recent_shares_ts_work.pop(0)
-                self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user))
+                self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user, share_target=share_info['bits'].target))
+                self.local_addr_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash))
 
             return on_time
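The reworked get_user_details() above parses the worker name with re.split('([+/])', username), so an address may carry any mix of /share_difficulty and +pseudoshare_difficulty suffixes. A minimal standalone sketch of that parsing follows; parse_worker_name and the example address are hypothetical, and the sketch returns raw difficulties where the real method converts each value to a target with bitcoin_data.difficulty_to_target and logs malformed values only when DEBUG is set:

import re

def parse_worker_name(username):
    # Split on '+' and '/' while keeping the separators, e.g.
    # 'nExampleAddress/512+0.05' -> ['nExampleAddress', '/', '512', '+', '0.05'].
    contents = re.split('([+/])', username)
    assert len(contents) % 2 == 1  # one value must follow every separator

    user, rest = contents[0], contents[1:]
    desired_share_difficulty = None        # set by '/NNN'
    desired_pseudoshare_difficulty = None  # set by '+NNN'
    for symbol, parameter in zip(rest[::2], rest[1::2]):
        try:
            value = float(parameter)
        except ValueError:
            continue  # the patch logs and ignores malformed values instead
        if symbol == '/':
            desired_share_difficulty = value
        elif symbol == '+':
            desired_pseudoshare_difficulty = value
    return user, desired_share_difficulty, desired_pseudoshare_difficulty

# A miner logging in as 'nExampleAddress/512+0.05' asks for share difficulty 512
# and pseudoshare difficulty 0.05, credited to that address:
print(parse_worker_name('nExampleAddress/512+0.05'))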
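get_work() now hands miners coinb1/coinb2 — the packed generation transaction split around its final 8-byte nonce (the trailing 4 bytes being the transaction's lock_time) — together with a merkle link built from [None] + other_transaction_hashes, and got_response() splices the miner's coinbase_nonce back in before checking the header's merkle root. A rough sketch of that reassembly, with hash256, merkle_root_from_link and splice_coinbase_nonce as byte-level stand-ins for the bitcoin_data.hash256/check_merkle_link calls used in the patch (which work on integers):

import hashlib

COINBASE_NONCE_LENGTH = 8

def hash256(data):
    # Double SHA-256 of the serialized data, returned as raw bytes.
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def merkle_root_from_link(coinbase_hash, branch):
    # Fold the coinbase hash up a precomputed branch; index 0 means the
    # coinbase is always the left operand, which is why get_work() can build
    # merkle_link once from [None] + other_transaction_hashes.
    h = coinbase_hash
    for sibling in branch:
        h = hash256(h + sibling)
    return h

def splice_coinbase_nonce(coinb1, coinb2, coinbase_nonce):
    # got_response() does the same splice: the miner's 8-byte nonce sits
    # between the two fixed halves of the packed generation transaction; an
    # all-zero nonce leaves the original gentx unchanged.
    assert len(coinbase_nonce) == COINBASE_NONCE_LENGTH
    return coinb1 + coinbase_nonce + coinb2

# The new assert in got_response() then amounts to:
#   header['merkle_root'] == merkle_root_from_link(hash256(new_packed_gentx), branch)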