from __future__ import division

import json
import os
import time

from twisted.internet import task
from twisted.python import log
from twisted.web import resource, static

+import p2pool
from bitcoin import data as bitcoin_data
from . import data as p2pool_data
from util import graph, math
            pass
    try:
        os.rename(filename + '.new', filename)
-    except os.error: # windows can't overwrite
+    except: # XXX windows can't overwrite
        os.remove(filename)
        os.rename(filename + '.new', filename)
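
# get_web_root wires up the node's HTTP interface: a Twisted resource tree whose
# endpoints read from the live objects passed in (share tracker, current work,
# p2p node, local rate monitor, payout address, and the share/pseudoshare events)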
-def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, pseudoshare_received, share_received):
+def get_web_root(tracker, current_work, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, pseudoshare_received, share_received):
    start_time = time.time()
    web_root = resource.Resource()
            return 'need total argument. go to patron_sendmany/<TOTAL>'
        total = int(float(total)*1e8)
        trunc = int(float(trunc)*1e8)
-        return dict(
+        return json.dumps(dict(
            (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
            for script, value in get_current_scaled_txouts(total, trunc).iteritems()
            if bitcoin_data.script2_to_address(script, net.PARENT) is not None
-        )
+        ))
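
    # get_local_rates aggregates recent datums from local_rate_monitor into
    # per-miner hash rates (and dead-share rates), keyed by miner username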
    def get_local_rates():
        miner_hash_rates = {}
            pool_nonstale_hash_rate=nonstale_hash_rate,
            pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
            pool_stale_prop=stale_prop,
+            min_difficulty=bitcoin_data.target_to_difficulty(tracker.shares[current_work.value['best_share_hash']].max_target),
        )
    def get_local_stats():
        global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
        my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
-        my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
-        my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
+        my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 'orphan')
+        my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 'doa')
        my_share_count = my_unstale_count + my_orphan_count + my_doa_count
        my_stale_count = my_orphan_count + my_doa_count
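        # orphan shares lost a race against another share at the same height;
        # doa ("dead on arrival") shares were already stale when submitted;
        # both kinds count toward my_stale_count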
            miner_hash_rates=miner_hash_rates,
            miner_dead_hash_rates=miner_dead_hash_rates,
            efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
+            efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
+            peers=dict(
+                incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
+                outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
+            ),
+            shares=dict(
+                total=shares,
+                orphan=stale_orphan_shares,
+                dead=stale_doa_shares,
+            ),
+            uptime=time.time() - start_time,
+            block_value=current_work.value['subsidy']*1e-8,
+            warnings=p2pool_data.get_warnings(tracker, current_work, net),
        )
    class WebInterface(resource.Resource):
    web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats))
    web_root.putChild('local_stats', WebInterface(get_local_stats))
-    web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues()), 'text/plain'))
-    web_root.putChild('peer_versions', WebInterface(lambda: ''.join('%s:%i ' % peer.addr + peer.other_sub_version + '\n' for peer in p2p_node.peers.itervalues()), 'text/plain'))
+    web_root.putChild('peer_addresses', WebInterface(lambda: ['%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port) for peer in p2p_node.peers.itervalues()]))
+    web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in p2p_node.peers.itervalues())))
    web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)))
    web_root.putChild('recent_blocks', WebInterface(lambda: [dict(ts=s.timestamp, hash='%064x' % s.header_hash) for s in tracker.get_chain(current_work.value['best_share_hash'], 24*60*60//net.SHARE_PERIOD) if s.pow_hash <= s.header['bits'].target]))
    web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
+    web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(tracker, current_work.value['best_share_hash'], 720, rates=True)))
    new_root = resource.Resource()
    web_root.putChild('web', new_root)
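
    # newer machine-readable endpoints are grouped under the /web/ subtree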
            ),
            attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target),
-            block_value=current_work2.value['subsidy']*1e-8,
+            block_value=current_work.value['subsidy']*1e-8,
        ))
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
                donation=share.share_data['donation']/65535,
                stale_info=share.share_data['stale_info'],
                nonce=share.share_data['nonce'],
+                desired_version=share.share_data['desired_version'],
            ),
            block=dict(
                hash='%064x' % share.header_hash,
    new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in tracker.tails for x in tracker.reverse_shares.get(t, set())]))
    new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in tracker.verified.tails for x in tracker.verified.reverse_shares.get(t, set())]))
    new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % current_work.value['best_share_hash']))
-
-    class Explorer(resource.Resource):
-        def render_GET(self, request):
-            return 'moved to /static/'
-        def getChild(self, child, request):
-            return self
-    new_root.putChild('explorer', Explorer())
+    new_root.putChild('currency_info', WebInterface(lambda: dict(
+        symbol=net.PARENT.SYMBOL,
+        block_explorer_url_prefix=net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
+        address_explorer_url_prefix=net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
+    )))
+    new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
    hd_path = os.path.join(datadir_path, 'graph_db')
    hd_data = _atomic_read(hd_path)
        'last_month': graph.DataViewDescription(300, 60*60*24*30),
        'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
    }
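
    # build_pool_rates is the default_func for the combined 'pool_rates' stream:
    # when an existing graph_db still holds the old separate 'pool_rate' and
    # 'pool_stale_rate' streams, it rebins them into good/bad/null bins so that
    # previously recorded graphs survive the rename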
+    def build_pool_rates(ds_name, ds_desc, dv_name, dv_desc, obj):
+        if not obj:
+            last_bin_end = 0
+            bins = dv_desc.bin_count*[{}]
+        else:
+            pool_rate = obj['pool_rate'][dv_name]
+            pool_stale_rate = obj['pool_stale_rate'][dv_name]
+            last_bin_end = max(pool_rate['last_bin_end'], pool_stale_rate['last_bin_end'])
+            def get_value(obj, t):
+                # find the bin covering time t and return its average value and sample count
+                n = int((obj['last_bin_end'] - t)/dv_desc.bin_width)
+                if n < 0 or n >= dv_desc.bin_count:
+                    return None, 0
+                total, count = obj['bins'][n].get('null', [0, 0])
+                if count == 0:
+                    return None, 0
+                return total/count, count
+            def get_bin(t):
+                total, total_count = get_value(pool_rate, t)
+                bad, bad_count = get_value(pool_stale_rate, t)
+                if total is None or bad is None:
+                    return {}
+                count = int((total_count+bad_count)/2+1/2) # average the two sample counts, rounded to nearest
+                return dict(good=[(total-bad)*count, count], bad=[bad*count, count], null=[0, count])
+            bins = [get_bin(last_bin_end - (i+1/2)*dv_desc.bin_width) for i in xrange(dv_desc.bin_count)]
+        return graph.DataView(dv_desc, ds_desc, last_bin_end, bins)
    hd = graph.HistoryDatabase.from_obj({
-        'local_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
-        'local_dead_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
-        'local_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
-        'local_dead_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
-        'pool_rate': graph.DataStreamDescription(True, dataview_descriptions),
-        'pool_stale_rate': graph.DataStreamDescription(True, dataview_descriptions),
-        'current_payout': graph.DataStreamDescription(True, dataview_descriptions),
-        'current_payouts': graph.DataStreamDescription(True, dataview_descriptions, multivalues=True),
-        'incoming_peers': graph.DataStreamDescription(True, dataview_descriptions),
-        'outgoing_peers': graph.DataStreamDescription(True, dataview_descriptions),
-        'miner_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, multivalues=True),
-        'miner_dead_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, multivalues=True),
+        'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+        'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+        'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+        'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+        'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
+            multivalue_undefined_means_0=True, default_func=build_pool_rates),
+        'current_payout': graph.DataStreamDescription(dataview_descriptions),
+        'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
+        'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
+        'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
+        'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
+        'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
+        'desired_versions': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
+            multivalue_undefined_means_0=True),
    }, hd_obj)
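    # flush the graph database to disk every 100 seconds via the atomic-rename helper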
    task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
    @pseudoshare_received.watch
    def add_point():
        if tracker.get_height(current_work.value['best_share_hash']) < 720:
            return
-        nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
-        poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
        t = time.time()
-        hd.datastreams['pool_rate'].add_datum(t, poolrate)
-        hd.datastreams['pool_stale_rate'].add_datum(t, poolrate - nonstalerate)
+        hd.datastreams['pool_rates'].add_datum(t, p2pool_data.get_stale_counts(tracker, current_work.value['best_share_hash'], 720, rates=True))
        current_txouts = get_current_txouts()
        hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8)
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address))
        hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming))
        hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming))
+
+        vs = p2pool_data.get_desired_version_counts(tracker, current_work.value['best_share_hash'], 720)
+        vs_total = sum(vs.itervalues())
+        hd.datastreams['desired_versions'].add_datum(t, dict((str(k), v/vs_total) for k, v in vs.iteritems()))
    task.LoopingCall(add_point).start(5)
    new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
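    # dataviews are then served at /web/graph_data/<source>/<view>,
    # e.g. /web/graph_data/pool_rates/last_month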