from __future__ import division
import cgi
+import errno
import json
import os
+import sys
import time
-import types
from twisted.internet import reactor, task
from twisted.python import log
-from twisted.web import resource
+from twisted.web import resource, static
from bitcoin import data as bitcoin_data
from . import data as p2pool_data, graphs
-from util import math
+from util import graph, math
+
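+# Read back a file maintained by _atomic_write: prefer the final filename,
+# falling back to the '.new' temporary if a previous rename was interrupted.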
+def _atomic_read(filename):
+ try:
+ with open(filename, 'rb') as f:
+ return f.read()
+ except IOError, e:
+ if e.errno != errno.ENOENT:
+ raise
+ try:
+ with open(filename + '.new', 'rb') as f:
+ return f.read()
+ except IOError, e:
+ if e.errno != errno.ENOENT:
+ raise
+ return None
+
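+# Atomically replace a file's contents: write to a '.new' temporary, flush
+# it to disk, then rename it over the original.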
+def _atomic_write(filename, data):
+ with open(filename + '.new', 'wb') as f:
+ f.write(data)
+ f.flush()
+ try:
+ os.fsync(f.fileno())
+ except: # fsync is best-effort; some platforms/file objects don't support it
+ pass
+ try:
+ os.rename(filename + '.new', filename)
+ except os.error: # windows can't overwrite
+ os.remove(filename)
+ os.rename(filename + '.new', filename)
def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received):
start_time = time.time()
def get_users():
height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
- weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
+ weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256, False)
res = {}
for script in sorted(weights, key=lambda s: weights[s]):
res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
if datum['dead']:
miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
+ (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
+
return json.dumps(dict(
my_hash_rates_in_last_hour=dict(
note="DEPRECATED",
),
miner_hash_rates=miner_hash_rates,
miner_dead_hash_rates=miner_dead_hash_rates,
+ efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
))
def get_peer_addresses():
format_bits = lambda bits: '%f (bits=%#8x) Work required: %sH' % (bitcoin_data.target_to_difficulty(bits.target), bits.bits, math.format(bitcoin_data.target_to_average_attempts(bits.target)))
- request.write('<h1>Share <a href="%x">%s</a></h1>' % (share.hash, p2pool_data.format_hash(share.hash)))
- request.write('<p>Previous: <a href="%x">%s</a></p>' % (share.previous_hash, p2pool_data.format_hash(share.previous_hash)))
- request.write('<p>Next: %s</p>' % (', '.join('<a href="%x">%s</a>' % (next, p2pool_data.format_hash(next)) for next in tracker.reverse_shares.get(share.hash, set())),))
+ request.write('<h1>%s <a href="%x">%s</a></h1>' % (share.__class__.__name__, share.hash, p2pool_data.format_hash(share.hash)))
+ if share.previous_hash is not None:
+ request.write('<p>Previous: <a href="%x">%s</a>' % (share.previous_hash, p2pool_data.format_hash(share.previous_hash)))
+ if tracker.get_height(share.hash) >= 100:
+ jump_hash = tracker.get_nth_parent_hash(share.hash, 100)
+ if jump_hash is not None:
+ request.write(' (100 jump <a href="%x">%s</a>)' % (jump_hash, p2pool_data.format_hash(jump_hash)))
+ request.write('</p>')
+ request.write('<p>Next: %s</p>' % (', '.join('<a href="%x">%s</a>' % (next, p2pool_data.format_hash(next)) for next in sorted(tracker.reverse_shares.get(share.hash, set()), key=lambda sh: -len(tracker.reverse_shares.get(sh, set())))),))
request.write('<p>Verified: %s</p>' % (share.hash in tracker.verified.shares,))
request.write('<p>Time first seen: %s</p>' % (time.ctime(start_time if share.time_seen == 0 else share.time_seen),))
request.write('<p>Peer first received from: %s</p>' % ('%s:%i' % share.peer.addr if share.peer is not None else 'self or cache',))
request.write('<p>Nonce: %i</p>' % (share.header['nonce'],))
if share.other_txs is not None:
tx_count = len(share.other_txs)
- elif len(share.merkle_branch) == 0:
+ elif len(share.merkle_link['branch']) == 0:
tx_count = 1
else:
- tx_count = 'between %i and %i' % (2**len(share.merkle_branch)//2+1, 2**len(share.merkle_branch))
+ tx_count = 'between %i and %i' % (2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch']))
request.write('<p>Transactions: %s</p>' % (tx_count,))
coinbase = share.share_data['coinbase'].ljust(2, '\x00')
request.write('<p>Coinbase: %s %s</p>' % (cgi.escape(repr(coinbase)), coinbase.encode('hex')))
reactor.callLater(1, grapher.add_localrate_point, work, dead)
if user is not None:
reactor.callLater(1, grapher.add_localminer_point, user, work, dead)
-
+
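+ # Persist graph history across restarts in datadir/graph_db; a missing or
+ # unreadable file just means starting from an empty history.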
+ hd_path = os.path.join(datadir_path, 'graph_db')
+ hd_data = _atomic_read(hd_path)
+ hd_obj = {}
+ if hd_data is not None:
+ try:
+ hd_obj = json.loads(hd_data)
+ except Exception:
+ log.err(None, 'Error reading graph database:')
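+ # Each view keeps a fixed number of bins spanning the named period,
+ # e.g. 150 bins over the last hour.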
+ dataview_descriptions = {
+ 'last_hour': graph.DataViewDescription(150, 60*60),
+ 'last_day': graph.DataViewDescription(300, 60*60*24),
+ 'last_week': graph.DataViewDescription(300, 60*60*24*7),
+ 'last_month': graph.DataViewDescription(300, 60*60*24*30),
+ 'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
+ }
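+ # First argument to DataStreamDescription: False for streams fed by
+ # discrete events (work per pseudoshare, below), True for values sampled
+ # periodically in add_point.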
+ hd = graph.HistoryDatabase.from_obj({
+ 'local_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
+ 'local_dead_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
+ 'pool_rate': graph.DataStreamDescription(True, dataview_descriptions),
+ 'pool_stale_rate': graph.DataStreamDescription(True, dataview_descriptions),
+ 'current_payout': graph.DataStreamDescription(True, dataview_descriptions),
+ 'incoming_peers': graph.DataStreamDescription(True, dataview_descriptions),
+ 'outgoing_peers': graph.DataStreamDescription(True, dataview_descriptions),
+ }, hd_obj)
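+ # Flush the history database to disk every 100 seconds.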
+ task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
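+ # Count every pseudoshare's work toward the local hash rate; rejected
+ # ('dead') work is also tracked in its own stream.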
+ @pseudoshare_received.watch
+ def _(work, dead, user):
+ t = time.time()
+ hd.datastreams['local_hash_rate'].add_datum(t, work)
+ if dead:
+ hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
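+ # Sample pool-wide statistics every five seconds, once at least 720
+ # shares of history are available.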
+ def add_point():
+ if tracker.get_height(current_work.value['best_share_hash']) < 720:
+ return
+ nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
+ poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
+ t = time.time()
+ hd.datastreams['pool_rate'].add_datum(t, poolrate)
+ hd.datastreams['pool_stale_rate'].add_datum(t, poolrate - nonstalerate)
+ hd.datastreams['current_payout'].add_datum(t, get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8)
+ hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming))
+ hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming))
+ task.LoopingCall(add_point).start(5)
+ new_root.putChild('graph_data', WebInterface(lambda source, view: json.dumps(hd.datastreams[source].dataviews[view].get_data(time.time())), 'application/json'))
+
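+ # Serve the bundled web UI from the 'web-static' directory alongside the
+ # entry script.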
+ web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))
return web_root