+from __future__ import division
+
+import errno
import json
import os
+import sys
import time
from twisted.internet import task
from twisted.python import log
-from twisted.web import resource
+from twisted.web import resource, static
from bitcoin import data as bitcoin_data
-from . import data as p2pool_data, graphs
-from util import math
+from . import data as p2pool_data
+from util import graph, math
-def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, recent_blocks):
+def _atomic_read(filename):
+ # Read the contents of `filename` written by _atomic_write(): prefer the
+ # committed file, fall back to the '<filename>.new' temp file left behind
+ # by an interrupted write, and return None if neither exists.
+ try:
+ with open(filename, 'rb') as f:
+ return f.read()
+ except IOError, e:
+ # only a missing file is tolerated; real I/O errors propagate
+ if e.errno != errno.ENOENT:
+ raise
+ try:
+ with open(filename + '.new', 'rb') as f:
+ return f.read()
+ except IOError, e:
+ if e.errno != errno.ENOENT:
+ raise
+ return None
+
+def _atomic_write(filename, data):
+ # Durably replace `filename` with `data`: write to '<filename>.new',
+ # flush/fsync, then rename over the original so readers never observe a
+ # partially-written file (see _atomic_read for the matching recovery).
+ with open(filename + '.new', 'wb') as f:
+ f.write(data)
+ f.flush()
+ try:
+ os.fsync(f.fileno())
+ except:
+ # best-effort: fsync may be unavailable on some platforms/filesystems
+ pass
+ try:
+ os.rename(filename + '.new', filename)
+ except os.error: # windows can't overwrite
+ os.remove(filename)
+ os.rename(filename + '.new', filename)
+
+def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, pseudoshare_received, share_received):
+ # Builds the Twisted resource tree for p2pool's JSON/plain-text web API.
+ # pseudoshare_received/share_received are watchable event sources (their
+ # .watch decorators are used below to feed the graph database).
start_time = time.time()
web_root = resource.Resource()
- def get_rate():
- if tracker.get_height(current_work.value['best_share_hash']) < 720:
- return json.dumps(None)
- return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
- / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
-
def get_users():
height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
res = {}
for script in sorted(weights, key=lambda s: weights[s]):
- res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
- return json.dumps(res)
+ res[bitcoin_data.script2_to_address(script, net.PARENT)] = weights[script]/total_weight
+ return res
def get_current_scaled_txouts(scale, trunc=0):
txouts = get_current_txouts()
results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
return results
- def get_current_payouts():
- return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
-
- def get_patron_sendmany(this):
- try:
- if '/' in this:
- this, trunc = this.split('/', 1)
- else:
- trunc = '0.01'
- return json.dumps(dict(
- (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
- for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
- if bitcoin_data.script2_to_address(script, net.PARENT) is not None
- ))
- except:
- log.err()
- return json.dumps(None)
+ def get_patron_sendmany(total=None, trunc='0.01'):
+ if total is None:
+ return 'need total argument. go to patron_sendmany/<TOTAL>'
+ total = int(float(total)*1e8)
+ trunc = int(float(trunc)*1e8)
+ return dict(
+ (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
+ for script, value in get_current_scaled_txouts(total, trunc).iteritems()
+ if bitcoin_data.script2_to_address(script, net.PARENT) is not None
+ )
def get_global_stats():
# averaged over last hour
nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
- return json.dumps(dict(
+ return dict(
pool_nonstale_hash_rate=nonstale_hash_rate,
pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
pool_stale_prop=stale_prop,
- ))
+ )
def get_local_stats():
lookbehind = 3600//net.SHARE_PERIOD
if datum['dead']:
miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
- return json.dumps(dict(
+ (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
+
+ return dict(
my_hash_rates_in_last_hour=dict(
note="DEPRECATED",
nonstale=share_att_s,
),
miner_hash_rates=miner_hash_rates,
miner_dead_hash_rates=miner_dead_hash_rates,
- ))
-
- def get_peer_addresses():
- return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
-
- def get_uptime():
- return json.dumps(time.time() - start_time)
+ efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
+ )
class WebInterface(resource.Resource):
- def __init__(self, func, mime_type, *fields):
- self.func, self.mime_type, self.fields = func, mime_type, fields
+ # Adapts a plain callable into a Twisted resource. Extra URL path
+ # segments are accumulated into `args` and passed positionally to
+ # `func`; 'application/json' results are json.dumps'd automatically.
+ def __init__(self, func, mime_type='application/json', args=()):
+ resource.Resource.__init__(self)
+ self.func, self.mime_type, self.args = func, mime_type, args
+
+ def getChild(self, child, request):
+ # each child path segment becomes one more positional argument
+ return WebInterface(self.func, self.mime_type, self.args + (child,))
def render_GET(self, request):
request.setHeader('Content-Type', self.mime_type)
request.setHeader('Access-Control-Allow-Origin', '*')
- return self.func(*(request.args[field][0] for field in self.fields))
-
- web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
- web_root.putChild('users', WebInterface(get_users, 'application/json'))
- web_root.putChild('fee', WebInterface(lambda: json.dumps(worker_fee), 'application/json'))
- web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
- web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
- web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
- web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
- web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
- web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)), 'application/json'))
- web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
- web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
+ # JSON serialization happens here so handlers can return plain objects
+ res = self.func(*self.args)
+ return json.dumps(res) if self.mime_type == 'application/json' else res
- try:
- from . import draw
- web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
- except ImportError:
- print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
+ # Top-level endpoints; WebInterface defaults to application/json, so the
+ # handlers below return plain Python objects and are JSON-encoded on render.
+ web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)/(1-p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))))
+ web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(tracker.shares[current_work.value['best_share_hash']].max_target)))
+ web_root.putChild('users', WebInterface(get_users))
+ web_root.putChild('fee', WebInterface(lambda: worker_fee))
+ web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems())))
+ web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
+ web_root.putChild('global_stats', WebInterface(get_global_stats))
+ web_root.putChild('local_stats', WebInterface(get_local_stats))
+ web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues()), 'text/plain'))
+ web_root.putChild('peer_versions', WebInterface(lambda: ''.join('%s:%i ' % peer.addr + peer.other_sub_version + '\n' for peer in p2p_node.peers.itervalues()), 'text/plain'))
+ web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)))
+ web_root.putChild('recent_blocks', WebInterface(lambda: [dict(ts=s.timestamp, hash='%064x' % s.header_hash) for s in tracker.get_chain(current_work.value['best_share_hash'], 24*60*60//net.SHARE_PERIOD) if s.pow_hash <= s.header['bits'].target]))
+ web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
new_root = resource.Resource()
web_root.putChild('web', new_root)
with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
f.write(json.dumps(stat_log))
task.LoopingCall(update_stat_log).start(5*60)
- new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
+ new_root.putChild('log', WebInterface(lambda: stat_log))
+
+ def get_share(share_hash_str):
+ # Explorer endpoint: describe one share by its hex hash. Returns a
+ # JSON-able dict, or None (-> JSON null) if the share is unknown.
+ if int(share_hash_str, 16) not in tracker.shares:
+ return None
+ share = tracker.shares[int(share_hash_str, 16)]
+
+ return dict(
+ parent='%064x' % share.previous_hash,
+ children=['%064x' % x for x in sorted(tracker.reverse_shares.get(share.hash, set()), key=lambda sh: -len(tracker.reverse_shares.get(sh, set())))], # sorted from most children to least children
+ local=dict(
+ verified=share.hash in tracker.verified.shares,
+ # time_seen == 0 marks shares loaded from disk, not seen live
+ time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
+ peer_first_received_from=share.peer.addr if share.peer is not None else None,
+ ),
+ share_data=dict(
+ timestamp=share.timestamp,
+ target=share.target,
+ max_target=share.max_target,
+ payout_address=bitcoin_data.script2_to_address(share.new_script, net.PARENT),
+ donation=share.share_data['donation']/65535,
+ stale_info=share.share_data['stale_info'],
+ nonce=share.share_data['nonce'],
+ ),
+ block=dict(
+ hash='%064x' % share.header_hash,
+ header=dict(
+ version=share.header['version'],
+ previous_block='%064x' % share.header['previous_block'],
+ merkle_root='%064x' % share.header['merkle_root'],
+ timestamp=share.header['timestamp'],
+ target=share.header['bits'].target,
+ nonce=share.header['nonce'],
+ ),
+ gentx=dict(
+ hash='%064x' % share.gentx_hash,
+ coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
+ value=share.share_data['subsidy']*1e-8,
+ ),
+ # exact count when txs are held; otherwise bounded by the merkle
+ # link depth. NOTE(review): the middle branch yields a bare 1,
+ # not [1, 1] -- inconsistent with the other two branches; confirm
+ # consumers tolerate both shapes.
+ txn_count_range=[len(share.other_txs), len(share.other_txs)] if share.other_txs is not None else 1 if len(share.merkle_link['branch']) == 0 else [2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch'])],
+ ),
+ )
+ # chain-introspection endpoints; hashes are rendered as 64-hex-digit strings
+ new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
+ new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in tracker.heads]))
+ new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in tracker.verified.heads]))
+ new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in tracker.tails for x in tracker.reverse_shares.get(t, set())]))
+ new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in tracker.verified.tails for x in tracker.verified.reverse_shares.get(t, set())]))
+ new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % current_work.value['best_share_hash']))
+
+ # Legacy stub: the old server-rendered explorer was replaced by static
+ # files; answer every path under /explorer with a pointer to /static/.
+ class Explorer(resource.Resource):
+ def render_GET(self, request):
+ return 'moved to /static/'
+ def getChild(self, child, request):
+ # swallow any sub-path and serve the same notice
+ return self
+ new_root.putChild('explorer', Explorer())
- grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
- web_root.putChild('graphs', grapher.get_resource())
+ # Load the persisted graph history (written below via _atomic_write);
+ # a corrupt database is logged and discarded rather than being fatal.
+ hd_path = os.path.join(datadir_path, 'graph_db')
+ hd_data = _atomic_read(hd_path)
+ hd_obj = {}
+ if hd_data is not None:
+ try:
+ hd_obj = json.loads(hd_data)
+ except Exception:
+ log.err(None, 'Error reading graph database:')
+ # (bucket_count, total_seconds) per view resolution
+ dataview_descriptions = {
+ 'last_hour': graph.DataViewDescription(150, 60*60),
+ 'last_day': graph.DataViewDescription(300, 60*60*24),
+ 'last_week': graph.DataViewDescription(300, 60*60*24*7),
+ 'last_month': graph.DataViewDescription(300, 60*60*24*30),
+ 'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
+ }
+ def combine_and_keep_largest(*dicts):
+ # Merge per-miner dicts by summing values, then keep only the 30
+ # largest nonzero entries (bounds the per-datapoint miner set).
+ res = {}
+ for d in dicts:
+ for k, v in d.iteritems():
+ res[k] = res.get(k, 0) + v
+ return dict((k, v) for k, v in sorted(res.iteritems(), key=lambda (k, v): v)[-30:] if v)
+ # Rebuild the in-memory history database from the persisted object; the
+ # first DataStreamDescription argument distinguishes gauge-style streams
+ # (True) from accumulated-work streams (False) -- TODO confirm semantics
+ # against util.graph.
+ hd = graph.HistoryDatabase.from_obj({
+ 'local_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
+ 'local_dead_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
+ 'local_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
+ 'local_dead_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
+ 'pool_rate': graph.DataStreamDescription(True, dataview_descriptions),
+ 'pool_stale_rate': graph.DataStreamDescription(True, dataview_descriptions),
+ 'current_payout': graph.DataStreamDescription(True, dataview_descriptions),
+ 'incoming_peers': graph.DataStreamDescription(True, dataview_descriptions),
+ 'outgoing_peers': graph.DataStreamDescription(True, dataview_descriptions),
+ 'miner_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
+ 'miner_dead_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
+ }, hd_obj)
+ # persist every 100 seconds via the crash-safe writer above
+ task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
+ # feed local/miner hash-rate streams from every pseudoshare event
+ @pseudoshare_received.watch
+ def _(work, dead, user):
+ t = time.time()
+ hd.datastreams['local_hash_rate'].add_datum(t, work)
+ if dead:
+ hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
+ if user is not None:
+ hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
+ if dead:
+ hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
+ # feed actual-share streams from every accepted share event
+ @share_received.watch
+ def _(work, dead):
+ t = time.time()
+ hd.datastreams['local_share_hash_rate'].add_datum(t, work)
+ if dead:
+ hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
def add_point():
if tracker.get_height(current_work.value['best_share_hash']) < 720:
return
nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
- grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
- task.LoopingCall(add_point).start(100)
+ # sample pool-wide and local stats into the graph streams
+ t = time.time()
+ hd.datastreams['pool_rate'].add_datum(t, poolrate)
+ hd.datastreams['pool_stale_rate'].add_datum(t, poolrate - nonstalerate)
+ hd.datastreams['current_payout'].add_datum(t, get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8)
+ hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming))
+ hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming))
+ # sampling interval tightened from 100s to 5s for the new graph db
+ task.LoopingCall(add_point).start(5)
+ # /web/graph_data/<source>/<view> -- path segments map to WebInterface args
+ new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
+
+ # serve the static frontend from 'web-static' next to the entry script
+ web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))
return web_root