import json
import os
import sys
import time
import traceback

from twisted.internet import defer
from twisted.python import log
from twisted.web import resource, static

import p2pool
from bitcoin import data as bitcoin_data
from . import data as p2pool_data, p2p
from util import deferral, deferred_resource, graph, math, memory, pack, variable
def _atomic_read(filename):
try:
pass
try:
os.rename(filename + '.new', filename)
- except os.error: # windows can't overwrite
+ except: # XXX windows can't overwrite
os.remove(filename)
os.rename(filename + '.new', filename)
-def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received, share_received):
+def get_web_root(wb, datadir_path, bitcoind_warning_var, stop_event=variable.Event()):
+ node = wb.node
start_time = time.time()
web_root = resource.Resource()
def get_users():
- height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
- weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
+ height, last = node.tracker.get_height_and_last(node.best_share_var.value)
+ weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(node.best_share_var.value, min(height, 720), 65535*2**256)
res = {}
for script in sorted(weights, key=lambda s: weights[s]):
- res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
- return json.dumps(res)
+ res[bitcoin_data.script2_to_address(script, node.net.PARENT)] = weights[script]/total_weight
+ return res
def get_current_scaled_txouts(scale, trunc=0):
- txouts = get_current_txouts()
+ txouts = node.get_current_txouts()
total = sum(txouts.itervalues())
results = dict((script, value*scale//total) for script, value in txouts.iteritems())
if trunc > 0:
return 'need total argument. go to patron_sendmany/<TOTAL>'
total = int(float(total)*1e8)
trunc = int(float(trunc)*1e8)
- return dict(
- (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
+ return json.dumps(dict(
+ (bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8)
for script, value in get_current_scaled_txouts(total, trunc).iteritems()
- if bitcoin_data.script2_to_address(script, net.PARENT) is not None
- )
+ if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None
+ ))
def get_global_stats():
# averaged over last hour
- lookbehind = 3600//net.SHARE_PERIOD
- if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
+ if node.tracker.get_height(node.best_share_var.value) < 10:
return None
+ lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
- nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
- stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
+ nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)
+ stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
return dict(
pool_nonstale_hash_rate=nonstale_hash_rate,
pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
pool_stale_prop=stale_prop,
+ min_difficulty=bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target),
)
def get_local_stats():
- lookbehind = 3600//net.SHARE_PERIOD
- if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
+ if node.tracker.get_height(node.best_share_var.value) < 10:
return None
+ lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
- global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
+ global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
- my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
- my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
- my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
+ my_unstale_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes)
+ my_orphan_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan')
+ my_doa_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa')
my_share_count = my_unstale_count + my_orphan_count + my_doa_count
my_stale_count = my_orphan_count + my_doa_count
my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
- for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
- if share.hash in my_share_hashes)
- actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
- tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
+ for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1)
+ if share.hash in wb.my_share_hashes)
+ actual_time = (node.tracker.items[node.best_share_var.value].timestamp -
+ node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp)
share_att_s = my_work / actual_time
- miner_hash_rates = {}
- miner_dead_hash_rates = {}
- datums, dt = local_rate_monitor.get_datums_in_last()
- for datum in datums:
- miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
- if datum['dead']:
- miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
-
- (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
+ miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
+ (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
return dict(
my_hash_rates_in_last_hour=dict(
miner_hash_rates=miner_hash_rates,
miner_dead_hash_rates=miner_dead_hash_rates,
efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
+ efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
+ peers=dict(
+ incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
+ outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
+ ),
+ shares=dict(
+ total=shares,
+ orphan=stale_orphan_shares,
+ dead=stale_doa_shares,
+ ),
+ uptime=time.time() - start_time,
+ attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
+ attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
+ block_value=node.bitcoind_work.value['subsidy']*1e-8,
+ warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_warning_var.value, node.bitcoind_work.value),
+ donation_proportion=wb.donation_percentage/100,
+ version=p2pool.__version__,
+ protocol_version=p2p.Protocol.VERSION,
+ fee=wb.worker_fee,
)
- class WebInterface(resource.Resource):
+ class WebInterface(deferred_resource.DeferredResource):
def __init__(self, func, mime_type='application/json', args=()):
- resource.Resource.__init__(self)
+ deferred_resource.DeferredResource.__init__(self)
self.func, self.mime_type, self.args = func, mime_type, args
def getChild(self, child, request):
return WebInterface(self.func, self.mime_type, self.args + (child,))
+ @defer.inlineCallbacks
def render_GET(self, request):
request.setHeader('Content-Type', self.mime_type)
request.setHeader('Access-Control-Allow-Origin', '*')
- res = self.func(*self.args)
- return json.dumps(res) if self.mime_type == 'application/json' else res
+ res = yield self.func(*self.args)
+ defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res)
- web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)/(1-p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))))
- web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(tracker.shares[current_work.value['best_share_hash']].max_target)))
+ def decent_height():
+ return min(node.tracker.get_height(node.best_share_var.value), 720)
+ web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height())/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, decent_height()))))
+ web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target)))
web_root.putChild('users', WebInterface(get_users))
- web_root.putChild('fee', WebInterface(lambda: worker_fee))
- web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems())))
+ web_root.putChild('user_stales', WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in
+ p2pool_data.get_user_stale_props(node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems())))
+ web_root.putChild('fee', WebInterface(lambda: wb.worker_fee))
+ web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8) for script, value in node.get_current_txouts().iteritems())))
web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
web_root.putChild('global_stats', WebInterface(get_global_stats))
web_root.putChild('local_stats', WebInterface(get_local_stats))
- web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues()), 'text/plain'))
- web_root.putChild('peer_versions', WebInterface(lambda: ''.join('%s:%i ' % peer.addr + peer.other_sub_version + '\n' for peer in p2p_node.peers.itervalues()), 'text/plain'))
- web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)))
- web_root.putChild('recent_blocks', WebInterface(lambda: recent_blocks))
+ web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join('%s%s' % (peer.transport.getPeer().host, ':'+str(peer.transport.getPeer().port) if peer.transport.getPeer().port != node.net.P2P_PORT else '') for peer in node.p2p_node.peers.itervalues())))
+ web_root.putChild('peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues())))
+ web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue(
+ dict([(a, (yield b)) for a, b in
+ [(
+ '%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port),
+ defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
+ min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)])
+ ))()
+ ) for peer in list(node.p2p_node.peers.itervalues())]
+ ])
+ ))))
+ web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues())))
+ web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(wb.my_pubkey_hash, node.net.PARENT)))
+ web_root.putChild('recent_blocks', WebInterface(lambda: [dict(
+ ts=s.timestamp,
+ hash='%064x' % s.header_hash,
+ number=pack.IntType(24).unpack(s.share_data['coinbase'][1:4]) if len(s.share_data['coinbase']) >= 4 else None,
+ share='%064x' % s.hash,
+ ) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target]))
web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
-
- try:
- from . import draw
- web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
- except ImportError:
- print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
+ web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True)))
new_root = resource.Resource()
web_root.putChild('web', new_root)
while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
stat_log.pop(0)
- lookbehind = 3600//net.SHARE_PERIOD
- if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
+ lookbehind = 3600//node.net.SHARE_PERIOD
+ if node.tracker.get_height(node.best_share_var.value) < lookbehind:
return None
- global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
- (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
-
- miner_hash_rates = {}
- miner_dead_hash_rates = {}
- datums, dt = local_rate_monitor.get_datums_in_last()
- for datum in datums:
- miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
- if datum['dead']:
- miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
+ global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
+ (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
+ miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
stat_log.append(dict(
time=time.time(),
- pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
+ pool_hash_rate=p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)/(1-global_stale_prop),
pool_stale_prop=global_stale_prop,
local_hash_rates=miner_hash_rates,
local_dead_hash_rates=miner_dead_hash_rates,
shares=shares,
stale_shares=stale_orphan_shares + stale_doa_shares,
stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
- current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
+ current_payout=node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8,
peers=dict(
- incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
- outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
+ incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
+ outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
),
- attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].max_target),
- attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target),
- block_value=current_work2.value['subsidy']*1e-8,
+ attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
+ attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
+ block_value=node.bitcoind_work.value['subsidy']*1e-8,
))
with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
f.write(json.dumps(stat_log))
- task.LoopingCall(update_stat_log).start(5*60)
+ x = deferral.RobustLoopingCall(update_stat_log)
+ x.start(5*60)
+ stop_event.watch(x.stop)
new_root.putChild('log', WebInterface(lambda: stat_log))
def get_share(share_hash_str):
- if int(share_hash_str, 16) not in tracker.shares:
+ if int(share_hash_str, 16) not in node.tracker.items:
return None
- share = tracker.shares[int(share_hash_str, 16)]
+ share = node.tracker.items[int(share_hash_str, 16)]
return dict(
parent='%064x' % share.previous_hash,
- children=['%064x' % x for x in sorted(tracker.reverse_shares.get(share.hash, set()), key=lambda sh: -len(tracker.reverse_shares.get(sh, set())))], # sorted from most children to least children
+ children=['%064x' % x for x in sorted(node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())))], # sorted from most children to least children
+ type_name=type(share).__name__,
local=dict(
- verified=share.hash in tracker.verified.shares,
+ verified=share.hash in node.tracker.verified.items,
time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
- peer_first_received_from=share.peer.addr if share.peer is not None else None,
+ peer_first_received_from=share.peer_addr,
),
share_data=dict(
timestamp=share.timestamp,
target=share.target,
max_target=share.max_target,
- payout_address=bitcoin_data.script2_to_address(share.new_script, net.PARENT),
+ payout_address=bitcoin_data.script2_to_address(share.new_script, node.net.PARENT),
donation=share.share_data['donation']/65535,
stale_info=share.share_data['stale_info'],
nonce=share.share_data['nonce'],
+ desired_version=share.share_data['desired_version'],
+ absheight=share.absheight,
+ abswork=share.abswork,
),
block=dict(
hash='%064x' % share.header_hash,
hash='%064x' % share.gentx_hash,
coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
value=share.share_data['subsidy']*1e-8,
+ last_txout_nonce='%016x' % share.contents['last_txout_nonce'],
),
- txn_count_range=[len(share.other_txs), len(share.other_txs)] if share.other_txs is not None else 1 if len(share.merkle_link['branch']) == 0 else [2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch'])],
+ other_transaction_hashes=['%064x' % x for x in share.get_other_tx_hashes(node.tracker)],
),
)
new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
- new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in tracker.heads]))
- new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in tracker.verified.heads]))
- new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in tracker.tails for x in tracker.reverse_shares.get(t, set())]))
- new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in tracker.verified.tails for x in tracker.verified.reverse_shares.get(t, set())]))
- new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % current_work.value['best_share_hash']))
-
- class Explorer(resource.Resource):
- def render_GET(self, request):
- return 'moved to /static/explorer.html'
- def getChild(self, child, request):
- return self
- new_root.putChild('explorer', Explorer())
-
- grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
- web_root.putChild('graphs', grapher.get_resource())
- def add_point():
- if tracker.get_height(current_work.value['best_share_hash']) < 720:
- return
- nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
- poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
- grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
- task.LoopingCall(add_point).start(100)
- @pseudoshare_received.watch
- def _(work, dead, user, had_vip_pass):
- reactor.callLater(1, grapher.add_localrate_point, work, dead)
- if user is not None and had_vip_pass:
- reactor.callLater(1, grapher.add_localminer_point, user, work, dead)
+ new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads]))
+ new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.verified.heads]))
+ new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())]))
+ new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set())]))
+ new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value))
+ def get_share_data(share_hash_str):
+ if int(share_hash_str, 16) not in node.tracker.items:
+ return ''
+ share = node.tracker.items[int(share_hash_str, 16)]
+ return p2pool_data.share_type.pack(share.as_share1a())
+ new_root.putChild('share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 'application/octet-stream'))
+ new_root.putChild('currency_info', WebInterface(lambda: dict(
+ symbol=node.net.PARENT.SYMBOL,
+ block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
+ address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
+ tx_explorer_url_prefix=node.net.PARENT.TX_EXPLORER_URL_PREFIX,
+ )))
+ new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
hd_path = os.path.join(datadir_path, 'graph_db')
hd_data = _atomic_read(hd_path)
'last_month': graph.DataViewDescription(300, 60*60*24*30),
'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
}
- def combine_and_keep_largest(*dicts):
- res = {}
- for d in dicts:
- for k, v in d.iteritems():
- res[k] = res.get(k, 0) + v
- return dict((k, v) for k, v in sorted(res.iteritems(), key=lambda (k, v): v)[-30:] if v)
+ def build_desired_rates(ds_name, ds_desc, dv_name, dv_desc, obj):
+ if not obj:
+ last_bin_end = 0
+ bins = dv_desc.bin_count*[{}]
+ else:
+ pool_rates = obj['pool_rates'][dv_name]
+ desired_versions = obj['desired_versions'][dv_name]
+ def get_total_pool_rate(t):
+ n = int((pool_rates['last_bin_end'] - t)/dv_desc.bin_width)
+ if n < 0 or n >= dv_desc.bin_count:
+ return None
+ total = sum(x[0] for x in pool_rates['bins'][n].values())
+ count = math.mean(x[1] for x in pool_rates['bins'][n].values())
+ if count == 0:
+ return None
+ return total/count
+ last_bin_end = desired_versions['last_bin_end']
+ bins = [dict((name, (total*get_total_pool_rate(last_bin_end - (i+1/2)*dv_desc.bin_width), count)) for name, (total, count) in desired_versions['bins'][i].iteritems()) for i in xrange(dv_desc.bin_count)]
+ return graph.DataView(dv_desc, ds_desc, last_bin_end, bins)
hd = graph.HistoryDatabase.from_obj({
- 'local_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
- 'local_dead_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
- 'local_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
- 'local_dead_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
- 'pool_rate': graph.DataStreamDescription(True, dataview_descriptions),
- 'pool_stale_rate': graph.DataStreamDescription(True, dataview_descriptions),
- 'current_payout': graph.DataStreamDescription(True, dataview_descriptions),
- 'incoming_peers': graph.DataStreamDescription(True, dataview_descriptions),
- 'outgoing_peers': graph.DataStreamDescription(True, dataview_descriptions),
- 'miner_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
- 'miner_dead_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
+ 'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+ 'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+ 'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+ 'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+ 'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
+ multivalue_undefined_means_0=True),
+ 'current_payout': graph.DataStreamDescription(dataview_descriptions),
+ 'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
+ 'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
+ 'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
+ 'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
+ 'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
+ 'desired_versions': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
+ multivalue_undefined_means_0=True),
+ 'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
+ multivalue_undefined_means_0=True, default_func=build_desired_rates),
+ 'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
+ 'getwork_latency': graph.DataStreamDescription(dataview_descriptions),
+ 'memory_usage': graph.DataStreamDescription(dataview_descriptions),
}, hd_obj)
- task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
- @pseudoshare_received.watch
- def _(work, dead, user, had_vip_pass):
+ x = deferral.RobustLoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
+ x.start(100)
+ stop_event.watch(x.stop)
+ @wb.pseudoshare_received.watch
+ def _(work, dead, user):
t = time.time()
hd.datastreams['local_hash_rate'].add_datum(t, work)
if dead:
hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
if dead:
hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
- @share_received.watch
+ @wb.share_received.watch
def _(work, dead):
t = time.time()
hd.datastreams['local_share_hash_rate'].add_datum(t, work)
if dead:
hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
+ @node.p2p_node.traffic_happened.watch
+ def _(name, bytes):
+ hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})
def add_point():
- if tracker.get_height(current_work.value['best_share_hash']) < 720:
- return
- nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
- poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
+ if node.tracker.get_height(node.best_share_var.value) < 10:
+ return None
+ lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value))
t = time.time()
- hd.datastreams['pool_rate'].add_datum(t, poolrate)
- hd.datastreams['pool_stale_rate'].add_datum(t, poolrate - nonstalerate)
- hd.datastreams['current_payout'].add_datum(t, get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8)
- hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming))
- hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming))
- task.LoopingCall(add_point).start(5)
+
+ pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True)
+ pool_total = sum(pool_rates.itervalues())
+ hd.datastreams['pool_rates'].add_datum(t, pool_rates)
+
+ current_txouts = node.get_current_txouts()
+ hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8)
+ miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
+ current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems())
+ hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address))
+
+ hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming))
+ hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming))
+
+ vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind)
+ vs_total = sum(vs.itervalues())
+ hd.datastreams['desired_versions'].add_datum(t, dict((str(k), v/vs_total) for k, v in vs.iteritems()))
+ hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
+ try:
+ hd.datastreams['memory_usage'].add_datum(t, memory.resident())
+ except:
+ if p2pool.DEBUG:
+ traceback.print_exc()
+ x = deferral.RobustLoopingCall(add_point)
+ x.start(5)
+ stop_event.watch(x.stop)
+ @node.bitcoind_work.changed.watch
+ def _(new_work):
+ hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency'])
new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))