X-Git-Url: https://git.novaco.in/?a=blobdiff_plain;f=p2pool%2Fweb.py;h=6f72550373eafcc10dad957cc30864b151e43c2f;hb=b142f305e6c8cfbe68e5bb5c8e2f907d5f679741;hp=6ca5607694e2f7489575d3a78684daa8bf99a9a2;hpb=70d337b9024ff6564fcbebae114c95b91422aed3;p=p2pool.git

diff --git a/p2pool/web.py b/p2pool/web.py
index 6ca5607..6f72550 100644
--- a/p2pool/web.py
+++ b/p2pool/web.py
@@ -5,15 +5,16 @@ import json
 import os
 import sys
 import time
+import traceback
 
-from twisted.internet import task
+from twisted.internet import defer, reactor
 from twisted.python import log
 from twisted.web import resource, static
 
 import p2pool
 from bitcoin import data as bitcoin_data
-from . import data as p2pool_data
-from util import graph, math
+from . import data as p2pool_data, p2p
+from util import deferral, deferred_resource, graph, math, memory, pack, variable
 
 def _atomic_read(filename):
     try:
@@ -44,21 +45,22 @@ def _atomic_write(filename, data):
         os.remove(filename)
     os.rename(filename + '.new', filename)
 
-def get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, pseudoshare_received, share_received, best_share_var):
+def get_web_root(wb, datadir_path, bitcoind_getinfo_var, stop_event=variable.Event()):
+    node = wb.node
     start_time = time.time()
 
     web_root = resource.Resource()
 
     def get_users():
-        height, last = tracker.get_height_and_last(best_share_var.value)
-        weights, total_weight, donation_weight = tracker.get_cumulative_weights(best_share_var.value, min(height, 720), 65535*2**256)
+        height, last = node.tracker.get_height_and_last(node.best_share_var.value)
+        weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(node.best_share_var.value, min(height, 720), 65535*2**256)
         res = {}
         for script in sorted(weights, key=lambda s: weights[s]):
-            res[bitcoin_data.script2_to_address(script, net.PARENT)] = weights[script]/total_weight
+            res[bitcoin_data.script2_to_address(script, node.net.PARENT)] = weights[script]/total_weight
         return res
 
     def get_current_scaled_txouts(scale, trunc=0):
-        txouts = get_current_txouts()
+        txouts = node.get_current_txouts()
         total = sum(txouts.itervalues())
         results = dict((script, value*scale//total) for script, value in txouts.iteritems())
         if trunc > 0:
@@ -84,60 +86,50 @@ def get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net,
         total = int(float(total)*1e8)
         trunc = int(float(trunc)*1e8)
         return json.dumps(dict(
-            (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
+            (bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8)
             for script, value in get_current_scaled_txouts(total, trunc).iteritems()
-            if bitcoin_data.script2_to_address(script, net.PARENT) is not None
+            if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None
         ))
 
-    def get_local_rates():
-        miner_hash_rates = {}
-        miner_dead_hash_rates = {}
-        datums, dt = local_rate_monitor.get_datums_in_last()
-        for datum in datums:
-            miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
-            if datum['dead']:
-                miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
-        return miner_hash_rates, miner_dead_hash_rates
-
     def get_global_stats(): # averaged over last hour
-        lookbehind = 3600//net.SHARE_PERIOD
-        if tracker.get_height(best_share_var.value) < lookbehind:
+        if node.tracker.get_height(node.best_share_var.value) < 10:
             return None
+        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
 
-        nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, lookbehind)
-        stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, lookbehind)
+        nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)
+        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
         return dict(
             pool_nonstale_hash_rate=nonstale_hash_rate,
            pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
            pool_stale_prop=stale_prop,
-            min_difficulty=bitcoin_data.target_to_difficulty(tracker.shares[best_share_var.value].max_target),
+            min_difficulty=bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target),
         )
 
     def get_local_stats():
-        lookbehind = 3600//net.SHARE_PERIOD
-        if tracker.get_height(best_share_var.value) < lookbehind:
+        if node.tracker.get_height(node.best_share_var.value) < 10:
             return None
+        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
 
-        global_stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, lookbehind)
+        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
 
-        my_unstale_count = sum(1 for share in tracker.get_chain(best_share_var.value, lookbehind) if share.hash in my_share_hashes)
-        my_orphan_count = sum(1 for share in tracker.get_chain(best_share_var.value, lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 'orphan')
-        my_doa_count = sum(1 for share in tracker.get_chain(best_share_var.value, lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 'doa')
+        my_unstale_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes)
+        my_orphan_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan')
+        my_doa_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa')
         my_share_count = my_unstale_count + my_orphan_count + my_doa_count
         my_stale_count = my_orphan_count + my_doa_count
 
         my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
 
         my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
-            for share in tracker.get_chain(best_share_var.value, lookbehind - 1)
-            if share.hash in my_share_hashes)
-        actual_time = (tracker.shares[best_share_var.value].timestamp -
-            tracker.shares[tracker.get_nth_parent_hash(best_share_var.value, lookbehind - 1)].timestamp)
+            for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1)
+            if share.hash in wb.my_share_hashes)
+        actual_time = (node.tracker.items[node.best_share_var.value].timestamp -
+            node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp)
         share_att_s = my_work / actual_time
 
-        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
-        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
+        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
+        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
 
         return dict(
             my_hash_rates_in_last_hour=dict(
@@ -163,8 +155,8 @@ def get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net,
             efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
             efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
             peers=dict(
-                incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
-                outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
+                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
+                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
             ),
             shares=dict(
                 total=shares,
@@ -172,38 +164,65 @@ def get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net,
                 dead=stale_doa_shares,
             ),
             uptime=time.time() - start_time,
-            block_value=bitcoind_work.value['subsidy']*1e-8,
-            warnings=p2pool_data.get_warnings(tracker, best_share_var.value, net),
+            attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
+            attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
+            block_value=node.bitcoind_work.value['subsidy']*1e-8,
+            warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_getinfo_var.value, node.bitcoind_work.value),
+            donation_proportion=wb.donation_percentage/100,
+            version=p2pool.__version__,
+            protocol_version=p2p.Protocol.VERSION,
+            fee=wb.worker_fee,
         )
 
-    class WebInterface(resource.Resource):
+    class WebInterface(deferred_resource.DeferredResource):
         def __init__(self, func, mime_type='application/json', args=()):
-            resource.Resource.__init__(self)
+            deferred_resource.DeferredResource.__init__(self)
             self.func, self.mime_type, self.args = func, mime_type, args
 
         def getChild(self, child, request):
             return WebInterface(self.func, self.mime_type, self.args + (child,))
 
+        @defer.inlineCallbacks
         def render_GET(self, request):
             request.setHeader('Content-Type', self.mime_type)
             request.setHeader('Access-Control-Allow-Origin', '*')
-            res = self.func(*self.args)
-            return json.dumps(res) if self.mime_type == 'application/json' else res
+            res = yield self.func(*self.args)
+            defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res)
 
-    web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, 720)/(1-p2pool_data.get_average_stale_prop(tracker, best_share_var.value, 720))))
-    web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(tracker.shares[best_share_var.value].max_target)))
+    def decent_height():
+        return min(node.tracker.get_height(node.best_share_var.value), 720)
+    web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height())/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, decent_height()))))
+    web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target)))
     web_root.putChild('users', WebInterface(get_users))
-    web_root.putChild('fee', WebInterface(lambda: worker_fee))
-    web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems())))
+    web_root.putChild('user_stales', WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in
+        p2pool_data.get_user_stale_props(node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems())))
+    web_root.putChild('fee', WebInterface(lambda: wb.worker_fee))
+    web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8) for script, value in node.get_current_txouts().iteritems())))
     web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
     web_root.putChild('global_stats', WebInterface(get_global_stats))
     web_root.putChild('local_stats', WebInterface(get_local_stats))
-    web_root.putChild('peer_addresses', WebInterface(lambda: ['%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port) for peer in p2p_node.peers.itervalues()]))
-    web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in p2p_node.peers.itervalues())))
-    web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)))
-    web_root.putChild('recent_blocks', WebInterface(lambda: [dict(ts=s.timestamp, hash='%064x' % s.header_hash) for s in tracker.get_chain(best_share_var.value, 24*60*60//net.SHARE_PERIOD) if s.pow_hash <= s.header['bits'].target]))
+    web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join('%s%s' % (peer.transport.getPeer().host, ':'+str(peer.transport.getPeer().port) if peer.transport.getPeer().port != node.net.P2P_PORT else '') for peer in node.p2p_node.peers.itervalues())))
+    web_root.putChild('peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues())))
+    web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue(
+        dict([(a, (yield b)) for a, b in
+            [(
+                '%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port),
+                defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
+                    min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)])
+                ))()
+            ) for peer in list(node.p2p_node.peers.itervalues())]
+        ])
+    ))))
+    web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues())))
+    web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(wb.my_pubkey_hash, node.net.PARENT)))
+    web_root.putChild('recent_blocks', WebInterface(lambda: [dict(
+        ts=s.timestamp,
+        hash='%064x' % s.header_hash,
+        number=pack.IntType(24).unpack(s.share_data['coinbase'][1:4]) if len(s.share_data['coinbase']) >= 4 else None,
+        share='%064x' % s.hash,
+    ) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target]))
     web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
-    web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(tracker, best_share_var.value, 720, rates=True)))
+    web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True)))
 
     new_root = resource.Resource()
     web_root.putChild('web', new_root)
@@ -219,60 +238,65 @@ def get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net,
         while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
             stat_log.pop(0)
 
-        lookbehind = 3600//net.SHARE_PERIOD
-        if tracker.get_height(best_share_var.value) < lookbehind:
+        lookbehind = 3600//node.net.SHARE_PERIOD
+        if node.tracker.get_height(node.best_share_var.value) < lookbehind:
             return None
 
-        global_stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, lookbehind)
-        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
-        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
+        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
+        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
+        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
 
         stat_log.append(dict(
             time=time.time(),
-            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, lookbehind)/(1-global_stale_prop),
+            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)/(1-global_stale_prop),
            pool_stale_prop=global_stale_prop,
            local_hash_rates=miner_hash_rates,
            local_dead_hash_rates=miner_dead_hash_rates,
            shares=shares,
            stale_shares=stale_orphan_shares + stale_doa_shares,
            stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
-            current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
+            current_payout=node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8,
             peers=dict(
-                incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
-                outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
+                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
+                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
             ),
-            attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[best_share_var.value].max_target),
-            attempts_to_block=bitcoin_data.target_to_average_attempts(bitcoind_work.value['bits'].target),
-            block_value=bitcoind_work.value['subsidy']*1e-8,
+            attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
+            attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
+            block_value=node.bitcoind_work.value['subsidy']*1e-8,
        ))
 
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
-    task.LoopingCall(update_stat_log).start(5*60)
+    x = deferral.RobustLoopingCall(update_stat_log)
+    x.start(5*60)
+    stop_event.watch(x.stop)
     new_root.putChild('log', WebInterface(lambda: stat_log))
 
     def get_share(share_hash_str):
-        if int(share_hash_str, 16) not in tracker.shares:
+        if int(share_hash_str, 16) not in node.tracker.items:
             return None
-        share = tracker.shares[int(share_hash_str, 16)]
+        share = node.tracker.items[int(share_hash_str, 16)]
 
        return dict(
            parent='%064x' % share.previous_hash,
-            children=['%064x' % x for x in sorted(tracker.reverse_shares.get(share.hash, set()), key=lambda sh: -len(tracker.reverse_shares.get(sh, set())))], # sorted from most children to least children
+            children=['%064x' % x for x in sorted(node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())))], # sorted from most children to least children
+            type_name=type(share).__name__,
            local=dict(
-                verified=share.hash in tracker.verified.shares,
+                verified=share.hash in node.tracker.verified.items,
                time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
-                peer_first_received_from=share.peer.addr if share.peer is not None else None,
+                peer_first_received_from=share.peer_addr,
            ),
            share_data=dict(
                timestamp=share.timestamp,
                target=share.target,
                max_target=share.max_target,
-                payout_address=bitcoin_data.script2_to_address(share.new_script, net.PARENT),
+                payout_address=bitcoin_data.script2_to_address(share.new_script, node.net.PARENT),
                donation=share.share_data['donation']/65535,
                stale_info=share.share_data['stale_info'],
                nonce=share.share_data['nonce'],
                desired_version=share.share_data['desired_version'],
+                absheight=share.absheight,
+                abswork=share.abswork,
            ),
            block=dict(
                hash='%064x' % share.header_hash,
@@ -288,20 +312,28 @@ def get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net,
                    hash='%064x' % share.gentx_hash,
                    coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
                    value=share.share_data['subsidy']*1e-8,
+                    last_txout_nonce='%016x' % share.contents['last_txout_nonce'],
                ),
-                txn_count_range=[len(share.other_txs), len(share.other_txs)] if share.other_txs is not None else 1 if len(share.merkle_link['branch']) == 0 else [2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch'])],
+                other_transaction_hashes=['%064x' % x for x in share.get_other_tx_hashes(node.tracker)],
            ),
        )
     new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
-    new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in tracker.heads]))
-    new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in tracker.verified.heads]))
-    new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in tracker.tails for x in tracker.reverse_shares.get(t, set())]))
-    new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in tracker.verified.tails for x in tracker.verified.reverse_shares.get(t, set())]))
-    new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % best_share_var.value))
+    new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads]))
+    new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.verified.heads]))
+    new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())]))
+    new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set())]))
+    new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value))
+    def get_share_data(share_hash_str):
+        if int(share_hash_str, 16) not in node.tracker.items:
+            return ''
+        share = node.tracker.items[int(share_hash_str, 16)]
+        return p2pool_data.share_type.pack(share.as_share1a())
+    new_root.putChild('share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 'application/octet-stream'))
     new_root.putChild('currency_info', WebInterface(lambda: dict(
-        symbol=net.PARENT.SYMBOL,
-        block_explorer_url_prefix=net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
-        address_explorer_url_prefix=net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
+        symbol=node.net.PARENT.SYMBOL,
+        block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
+        address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
+        tx_explorer_url_prefix=node.net.PARENT.TX_EXPLORER_URL_PREFIX,
     )))
     new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
 
@@ -320,50 +352,43 @@ def get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net,
         'last_month': graph.DataViewDescription(300, 60*60*24*30),
         'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
     }
-    def build_pool_rates(ds_name, ds_desc, dv_name, dv_desc, obj):
+    def build_peers(ds_name, ds_desc, dv_name, dv_desc, obj):
         if not obj:
             last_bin_end = 0
             bins = dv_desc.bin_count*[{}]
         else:
-            pool_rate = obj['pool_rate'][dv_name]
-            pool_stale_rate = obj['pool_stale_rate'][dv_name]
-            last_bin_end = max(pool_rate['last_bin_end'], pool_stale_rate['last_bin_end'])
-            bins = dv_desc.bin_count*[{}]
-            def get_value(obj, t):
-                n = int((obj['last_bin_end'] - t)/dv_desc.bin_width)
-                if n < 0 or n >= dv_desc.bin_count:
-                    return None, 0
-                total, count = obj['bins'][n].get('null', [0, 0])
-                if count == 0:
-                    return None, 0
-                return total/count, count
-            def get_bin(t):
-                total, total_count = get_value(pool_rate, t)
-                bad, bad_count = get_value(pool_stale_rate, t)
-                if total is None or bad is None:
-                    return {}
-                count = int((total_count+bad_count)/2+1/2)
-                return dict(good=[(total-bad)*count, count], bad=[bad*count, count], null=[0, count])
-            bins = [get_bin(last_bin_end - (i+1/2)*dv_desc.bin_width) for i in xrange(dv_desc.bin_count)]
+            incoming_peers = obj['incoming_peers'][dv_name]
+            outgoing_peers = obj['outgoing_peers'][dv_name]
+            assert incoming_peers['last_bin_end'] == outgoing_peers['last_bin_end']
+            last_bin_end = incoming_peers['last_bin_end']
+            assert len(incoming_peers['bins']) == len(outgoing_peers['bins']) == dv_desc.bin_count
+            bins = [dict(incoming=inc.get('null', (0, 0)), outgoing=out.get('null', (0, 0))) for inc, out in zip(incoming_peers['bins'], outgoing_peers['bins'])]
         return graph.DataView(dv_desc, ds_desc, last_bin_end, bins)
     hd = graph.HistoryDatabase.from_obj({
         'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
         'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
         'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
         'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+        'local_orphan_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
         'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
-            multivalue_undefined_means_0=True, default_func=build_pool_rates),
+            multivalue_undefined_means_0=True),
         'current_payout': graph.DataStreamDescription(dataview_descriptions),
         'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
-        'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
-        'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
+        'peers': graph.DataStreamDescription(dataview_descriptions, multivalues=True, default_func=build_peers),
         'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
         'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
         'desired_versions': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
             multivalue_undefined_means_0=True),
+        'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
+            multivalue_undefined_means_0=True),
+        'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
+        'getwork_latency': graph.DataStreamDescription(dataview_descriptions),
+        'memory_usage': graph.DataStreamDescription(dataview_descriptions),
     }, hd_obj)
-    task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
-    @pseudoshare_received.watch
+    x = deferral.RobustLoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
+    x.start(100)
+    stop_event.watch(x.stop)
+    @wb.pseudoshare_received.watch
     def _(work, dead, user):
         t = time.time()
         hd.datastreams['local_hash_rate'].add_datum(t, work)
@@ -373,29 +398,61 @@ def get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net,
             hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
             if dead:
                 hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
-    @share_received.watch
-    def _(work, dead):
+    @wb.share_received.watch
+    def _(work, dead, share_hash):
         t = time.time()
         hd.datastreams['local_share_hash_rate'].add_datum(t, work)
         if dead:
             hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
+        def later():
+            res = node.tracker.is_child_of(share_hash, node.best_share_var.value)
+            if res is None: return # share isn't connected to sharechain?
+            if res and dead: # share was DOA, but is now in sharechain
+                # remove from DOA graph
+                hd.datastreams['local_dead_share_hash_rate'].add_datum(t, -work)
+            elif not res and not dead: # share wasn't DOA, and isn't in sharechain
+                # add to orphan graph
+                hd.datastreams['local_orphan_share_hash_rate'].add_datum(t, work)
+        reactor.callLater(200, later)
+    @node.p2p_node.traffic_happened.watch
+    def _(name, bytes):
+        hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})
 
     def add_point():
-        if tracker.get_height(best_share_var.value) < 720:
-            return
+        if node.tracker.get_height(node.best_share_var.value) < 10:
+            return None
+        lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value))
         t = time.time()
-        hd.datastreams['pool_rates'].add_datum(t, p2pool_data.get_stale_counts(tracker, best_share_var.value, 720, rates=True))
-        current_txouts = get_current_txouts()
-        hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8)
-        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
-        current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, net.PARENT), amount) for script, amount in current_txouts.iteritems())
+
+        pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True)
+        pool_total = sum(pool_rates.itervalues())
+        hd.datastreams['pool_rates'].add_datum(t, pool_rates)
+
+        current_txouts = node.get_current_txouts()
+        hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8)
+        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
+        current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems())
         hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address))
-        hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming))
-        hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming))
-        vs = p2pool_data.get_desired_version_counts(tracker, best_share_var.value, 720)
+        hd.datastreams['peers'].add_datum(t, dict(
+            incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
+            outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
+        ))
+
+        vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind)
        vs_total = sum(vs.itervalues())
        hd.datastreams['desired_versions'].add_datum(t, dict((str(k), v/vs_total) for k, v in vs.iteritems()))
-    task.LoopingCall(add_point).start(5)
+        hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
+        try:
+            hd.datastreams['memory_usage'].add_datum(t, memory.resident())
+        except:
+            if p2pool.DEBUG:
+                traceback.print_exc()
+    x = deferral.RobustLoopingCall(add_point)
+    x.start(5)
+    stop_event.watch(x.stop)
+    @node.bitcoind_work.changed.watch
+    def _(new_work):
+        hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency'])
     new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
 
     web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))