import json
import os
import time
import traceback
-from twisted.internet import defer, task
+from twisted.internet import defer, reactor
from twisted.python import log
from twisted.web import resource, static
import p2pool
from bitcoin import data as bitcoin_data
-from . import data as p2pool_data
-from util import deferred_resource, graph, math, memory, pack, variable
+from . import data as p2pool_data, p2p
+from util import deferral, deferred_resource, graph, math, memory, pack, variable
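# reactor is needed for the delayed share re-classification further down,
# deferral provides RobustLoopingCall (used in place of task.LoopingCall),
# and p2p is imported to expose the p2p protocol version in the stats.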
def _atomic_read(filename):
    # body elided: reads filename, falling back to filename + '.new'
def _atomic_write(filename, data):
    # writes data to filename + '.new', then renames it over filename:
    try:
        os.rename(filename + '.new', filename)
    except: # XXX windows can't overwrite an existing file via rename
        os.remove(filename)
        os.rename(filename + '.new', filename)
-def get_web_root(wb, datadir_path, bitcoind_warning_var, stop_event=variable.Event()):
+def get_web_root(wb, datadir_path, bitcoind_getinfo_var, stop_event=variable.Event()):
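# bitcoind_warning_var is replaced by bitcoind_getinfo_var, which presumably
# carries bitcoind's full getinfo result; get_warnings (below) now extracts
# the network warnings from it itself.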
node = wb.node
start_time = time.time()
attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
block_value=node.bitcoind_work.value['subsidy']*1e-8,
- warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_warning_var.value, node.bitcoind_work.value),
+ warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_getinfo_var.value, node.bitcoind_work.value),
donation_proportion=wb.donation_percentage/100,
+ version=p2pool.__version__,
+ protocol_version=p2p.Protocol.VERSION,
+ fee=wb.worker_fee,
)
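# The stats payload now also reports the node version, the p2p protocol
# version, and the worker fee, e.g. (illustrative values only):
#   {..., "version": "13.4", "protocol_version": 1300, "fee": 0.5}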
class WebInterface(deferred_resource.DeferredResource):
with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
f.write(json.dumps(stat_log))
- x = task.LoopingCall(update_stat_log)
+ x = deferral.RobustLoopingCall(update_stat_log)
x.start(5*60)
stop_event.watch(x.stop)
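# task.LoopingCall stops looping for good if one iteration raises, which could
# silently kill these maintenance loops. deferral.RobustLoopingCall presumably
# logs the error and keeps going. A minimal sketch of that behavior (names and
# details are illustrative, not p2pool's actual implementation):
class _RobustLoopingCallSketch(object):
    def __init__(self, func, *args, **kwargs):
        self.func, self.args, self.kwargs = func, args, kwargs
        self.running = False
    def start(self, period):
        self.running = True
        self.period = period
        self._iterate()
    def _iterate(self):
        if not self.running:
            return
        try:
            self.func(*self.args, **self.kwargs)
        except:
            log.err(None, 'error in looping call:') # log, but keep the loop alive
        reactor.callLater(self.period, self._iterate)
    def stop(self):
        self.running = False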
new_root.putChild('log', WebInterface(lambda: stat_log))
hash='%064x' % share.gentx_hash,
coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
value=share.share_data['subsidy']*1e-8,
+ last_txout_nonce='%016x' % share.contents['last_txout_nonce'],
),
- txn_count=len(list(share.iter_transaction_hash_refs())),
+ other_transaction_hashes=['%064x' % x for x in share.get_other_tx_hashes(node.tracker)],
),
)
new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
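# The share view now reports the share's last_txout_nonce and the full list of
# referenced transaction hashes (via get_other_tx_hashes) instead of a bare
# txn_count, presumably so the frontend can link to each transaction.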
symbol=node.net.PARENT.SYMBOL,
block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
+ tx_explorer_url_prefix=node.net.PARENT.TX_EXPLORER_URL_PREFIX,
)))
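# TX_EXPLORER_URL_PREFIX complements the existing block/address explorer
# prefixes, presumably so the frontend can build per-transaction links.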
new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
'last_month': graph.DataViewDescription(300, 60*60*24*30),
'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
}
- def build_desired_rates(ds_name, ds_desc, dv_name, dv_desc, obj):
- if not obj:
- last_bin_end = 0
- bins = dv_desc.bin_count*[{}]
- else:
- pool_rates = obj['pool_rates'][dv_name]
- desired_versions = obj['desired_versions'][dv_name]
- def get_total_pool_rate(t):
- n = int((pool_rates['last_bin_end'] - t)/dv_desc.bin_width)
- if n < 0 or n >= dv_desc.bin_count:
- return None
- total = sum(x[0] for x in pool_rates['bins'][n].values())
- count = math.mean(x[1] for x in pool_rates['bins'][n].values())
- if count == 0:
- return None
- return total/count
- last_bin_end = desired_versions['last_bin_end']
- bins = [dict((name, (total*get_total_pool_rate(last_bin_end - (i+1/2)*dv_desc.bin_width), count)) for name, (total, count) in desired_versions['bins'][i].iteritems()) for i in xrange(dv_desc.bin_count)]
- return graph.DataView(dv_desc, ds_desc, last_bin_end, bins)
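# build_desired_rates only existed to migrate old on-disk data: it rebuilt
# absolute per-version hash rates from the fraction-based 'desired_versions'
# stream. Since desired_version_rates is now recorded directly (see add_point
# below), both the migrator and the redundant stream can go.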
hd = graph.HistoryDatabase.from_obj({
'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
- 'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
- 'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+ 'local_share_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False,
+ multivalues=True, multivalue_undefined_means_0=True,
+ default_func=graph.make_multivalue_migrator(dict(good='local_share_hash_rate', dead='local_dead_share_hash_rate', orphan='local_orphan_share_hash_rate'),
+ post_func=lambda bins: [dict((k, (v[0] - (sum(bin.get(rem_k, (0, 0))[0] for rem_k in ['dead', 'orphan']) if k == 'good' else 0), v[1])) for k, v in bin.iteritems()) for bin in bins])),
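# Migration note: the old scalar 'local_share_hash_rate' stream counted every
# share, dead and orphan included, so post_func subtracts those components to
# recover the 'good' rate. E.g. a migrated bin of good=(10, 1), dead=(2, 1)
# becomes {'good': (8, 1), 'dead': (2, 1)} -- each value a (total, count) pair.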
'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
multivalue_undefined_means_0=True),
'current_payout': graph.DataStreamDescription(dataview_descriptions),
'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
- 'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
- 'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
+ 'peers': graph.DataStreamDescription(dataview_descriptions, multivalues=True, default_func=graph.make_multivalue_migrator(dict(incoming='incoming_peers', outgoing='outgoing_peers'))),
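# Same pattern as local_share_hash_rates above: the two scalar peer gauges are
# folded into one multivalue 'peers' stream, with make_multivalue_migrator
# mapping old 'incoming_peers'/'outgoing_peers' data onto the new keys on load.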
'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
- 'desired_versions': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
- multivalue_undefined_means_0=True),
'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
- multivalue_undefined_means_0=True, default_func=build_desired_rates),
+ multivalue_undefined_means_0=True),
'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
'getwork_latency': graph.DataStreamDescription(dataview_descriptions),
'memory_usage': graph.DataStreamDescription(dataview_descriptions),
}, hd_obj)
- x = task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
+ x = deferral.RobustLoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
x.start(100)
stop_event.watch(x.stop)
@wb.pseudoshare_received.watch
if dead:
hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
@wb.share_received.watch
- def _(work, dead):
+ def _(work, dead, share_hash):
t = time.time()
- hd.datastreams['local_share_hash_rate'].add_datum(t, work)
- if dead:
- hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
+ if not dead:
+ hd.datastreams['local_share_hash_rates'].add_datum(t, dict(good=work))
+ else:
+ hd.datastreams['local_share_hash_rates'].add_datum(t, dict(dead=work))
+ def later():
+ res = node.tracker.is_child_of(share_hash, node.best_share_var.value)
+ if res is None: res = False # share isn't connected to sharechain? assume orphaned
+ if res and dead: # share was DOA, but is now in sharechain
+ # move from dead to good
+ hd.datastreams['local_share_hash_rates'].add_datum(t, dict(dead=-work, good=work))
+ elif not res and not dead: # share wasn't DOA, and isn't in sharechain
+ # move from good to orphan
+ hd.datastreams['local_share_hash_rates'].add_datum(t, dict(good=-work, orphan=work))
+ reactor.callLater(200, later)
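# Recording a negative datum under the old key cancels the amount logged at
# submission time, so the good/dead/orphan split stays consistent once the
# share's fate is known; the 200 s delay is presumably enough for the share
# to propagate and the sharechain to settle around it.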
@node.p2p_node.traffic_happened.watch
def _(name, bytes):
hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})
current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems())
hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address))
- hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming))
- hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming))
+ hd.datastreams['peers'].add_datum(t, dict(
+ incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
+ outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
+ ))
vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind)
vs_total = sum(vs.itervalues())
- hd.datastreams['desired_versions'].add_datum(t, dict((str(k), v/vs_total) for k, v in vs.iteritems()))
hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
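# E.g. with vs = {13: 90.0, 14: 10.0} and pool_total = 2e12 H/s, this records
# {'13': 1.8e12, '14': 2e11}: the pool hash rate wanting each share version.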
try:
hd.datastreams['memory_usage'].add_datum(t, memory.resident())
except:
if p2pool.DEBUG:
traceback.print_exc()
- x = task.LoopingCall(add_point)
+ x = deferral.RobustLoopingCall(add_point)
x.start(5)
stop_event.watch(x.stop)
@node.bitcoind_work.changed.watch