1 from __future__ import division
10 from twisted.internet import reactor, task
11 from twisted.python import log
12 from twisted.web import resource, static
14 from bitcoin import data as bitcoin_data
15 from . import data as p2pool_data, graphs
16 from util import graph, math
def _atomic_read(filename):
    """Read and return the contents of `filename`, falling back to
    `filename + '.new'` (a write that finished but was never renamed into
    place); return None if neither file exists.

    A missing file (ENOENT) is expected and triggers the fallback / None;
    any other IOError is re-raised.

    NOTE(review): the original listing was truncated (try/return/except
    lines missing); this restores the complete implementation implied by
    the surviving errno.ENOENT checks and the '.new' fallback.
    """
    try:
        with open(filename, 'rb') as f:
            return f.read()
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
    try:
        with open(filename + '.new', 'rb') as f:
            return f.read()
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
    return None
def _atomic_write(filename, data):
    """Replace the contents of `filename` with `data` as atomically as the
    platform allows.

    Writes to `filename + '.new'`, flushes (and fsyncs, best-effort), then
    renames the temp file over the target. On Windows rename cannot
    overwrite an existing file, so the target is removed first in that case.

    NOTE(review): the original listing was truncated (the write/flush body
    and the Windows `os.remove` line were missing); this restores the
    implementation implied by the surviving rename/except-os.error lines.
    """
    with open(filename + '.new', 'wb') as f:
        f.write(data)
        f.flush()
        try:
            os.fsync(f.fileno())
        except EnvironmentError:
            pass  # durability is best-effort; some platforms/filesystems lack fsync
    try:
        os.rename(filename + '.new', filename)
    except os.error: # windows can't overwrite
        os.remove(filename)
        os.rename(filename + '.new', filename)
# Build and return the Twisted web resource tree that serves pool stats,
# payout information, a share explorer and graph data over HTTP.
# NOTE(review): this source is a damaged numbered listing -- each surviving
# line still carries its original line number, and gaps in that numbering
# mean lines are missing. Only comments are added here; surviving lines are
# untouched.
47 def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received, share_received):
# Captured once so the /uptime endpoint (registered below) can report
# elapsed wall time.
48 start_time = time.time()
50 web_root = resource.Resource()
# NOTE(review): the `def get_users():` header and the `res = {}` initializer
# (original lines ~51-55) are missing from this listing; the lines below are
# that helper's interior. It maps each payout script (human-readable form)
# to its fraction of the cumulative weight over the last <=720 shares.
53 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
54 weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256, False)
56 for script in sorted(weights, key=lambda s: weights[s]):
57 res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
58 return json.dumps(res)
# Scale the current payouts so they sum to `scale`, dropping entries below
# `trunc` and redistributing the truncated amount to a single weighted
# random winner.
60 def get_current_scaled_txouts(scale, trunc=0):
61 txouts = get_current_txouts()
62 total = sum(txouts.itervalues())
# Integer (floor) division here can make the results undershoot int(scale);
# compensated at the end of the function.
63 results = dict((script, value*scale//total) for script, value in txouts.iteritems())
# NOTE(review): initialization of total_random / random_set (original lines
# ~64-66) is missing from this listing, as are the loop-body lines that
# populate them.
67 for s in sorted(results, key=results.__getitem__):
68 if results[s] >= trunc:
70 total_random += results[s]
# Pool everything below `trunc` and hand it to one script chosen at random,
# weighted by its truncated amount.
73 winner = math.weighted_choice((script, results[script]) for script in random_set)
74 for script in random_set:
76 results[winner] = total_random
# Top up one weighted-random entry so the results sum to exactly int(scale).
77 if sum(results.itervalues()) < int(scale):
78 results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
# Render a bitcoind `sendmany`-style mapping of address -> amount (in coins)
# distributing `total` coins proportionally to current payouts.
# NOTE(review): the guard that checks `total is None` and the surrounding
# `return json.dumps(dict(...))` framing (original lines 82, 86, 90-91) are
# missing from this listing.
81 def get_patron_sendmany(total=None, trunc='0.01'):
83 return 'need total argument. go to patron_sendmany/<TOTAL>'
# Convert coin-denominated arguments to integer satoshis (1e8 per coin)
# before scaling.
84 total = int(float(total)*1e8)
85 trunc = int(float(trunc)*1e8)
87 (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
88 for script, value in get_current_scaled_txouts(total, trunc).iteritems()
# Scripts that do not map to a standard address are skipped.
89 if bitcoin_data.script2_to_address(script, net.PARENT) is not None
# Pool-wide statistics for the /global_stats endpoint.
92 def get_global_stats():
93 # averaged over last hour
94 lookbehind = 3600//net.SHARE_PERIOD
# Not enough chain history yet to average over a full hour.
# NOTE(review): the body of this guard (original lines 96-97) is missing.
95 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
98 nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
99 stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
# Total pool rate is inferred from the observed (non-stale) rate and the
# measured stale proportion. NOTE(review): the enclosing `return dict(`
# line (original line 100) is missing.
101 pool_nonstale_hash_rate=nonstale_hash_rate,
102 pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
103 pool_stale_prop=stale_prop,
# Statistics about this node's own shares over roughly the last hour, for
# the /local_stats endpoint.
106 def get_local_stats():
107 lookbehind = 3600//net.SHARE_PERIOD
# NOTE(review): the body of this not-enough-history guard (original lines
# 109-110) is missing.
108 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
111 global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
# Count our own shares in the recent chain. stale_info codes: 253 marks a
# share produced while we had an orphan, 254 while we had a dead share --
# consistent with the {253: 'had an orphan', 254: 'had a dead'} mapping used
# by the share explorer elsewhere in this file.
113 my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
114 my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
115 my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
116 my_share_count = my_unstale_count + my_orphan_count + my_doa_count
117 my_stale_count = my_orphan_count + my_doa_count
# None (not 0) when we have no shares at all, so callers can distinguish
# "no data" from "no stales".
119 my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
# Our attempted work over the window, converted to an attempts-per-second
# rate using the timestamps of the window's end shares.
121 my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
122 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
123 if share.hash in my_share_hashes)
124 actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
125 tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
126 share_att_s = my_work / actual_time
# Per-miner hash rates from the local rate monitor; dead work is also
# tallied separately. NOTE(review): the `for datum in datums:` loop header
# and the dead-work condition lines (original 131, 133) are missing.
128 miner_hash_rates = {}
129 miner_dead_hash_rates = {}
130 datums, dt = local_rate_monitor.get_datums_in_last()
132 miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
134 miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
136 (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
# NOTE(review): the enclosing `return dict(` line (original ~137-138) is
# missing; the remainder is that dict's keyword arguments.
139 my_hash_rates_in_last_hour=dict(
141 nonstale=share_att_s,
142 rewarded=share_att_s/(1 - global_stale_prop),
143 actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
145 my_share_counts_in_last_hour=dict(
146 shares=my_share_count,
147 unstale_shares=my_unstale_count,
148 stale_shares=my_stale_count,
149 orphan_stale_shares=my_orphan_count,
150 doa_stale_shares=my_doa_count,
152 my_stale_proportions_in_last_hour=dict(
154 orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
155 dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
157 miner_hash_rates=miner_hash_rates,
158 miner_dead_hash_rates=miner_dead_hash_rates,
159 efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
class WebInterface(resource.Resource):
    """Twisted resource that renders ``func(*args)`` on GET.

    When the MIME type is ``application/json`` the result is serialized with
    ``json.dumps``; otherwise it is returned verbatim. Extra URL path
    segments are accumulated into ``args`` via ``getChild``.
    """
    def __init__(self, func, mime_type='application/json', args=()):
        resource.Resource.__init__(self)
        self.func = func
        self.mime_type = mime_type
        self.args = args

    def getChild(self, child, request):
        # Each additional path segment becomes one more positional argument.
        return WebInterface(self.func, self.mime_type, self.args + (child,))

    def render_GET(self, request):
        request.setHeader('Content-Type', self.mime_type)
        request.setHeader('Access-Control-Allow-Origin', '*')
        result = self.func(*self.args)
        if self.mime_type == 'application/json':
            return json.dumps(result)
        return result
# Register the simple stat endpoints. Each WebInterface wraps a zero-arg
# callable evaluated per request.
176 web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)/(1-p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))))
177 web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(tracker.shares[current_work.value['best_share_hash']].max_target)))
178 web_root.putChild('users', WebInterface(get_users))
179 web_root.putChild('fee', WebInterface(lambda: worker_fee))
180 web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems())))
181 web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
182 web_root.putChild('global_stats', WebInterface(get_global_stats))
183 web_root.putChild('local_stats', WebInterface(get_local_stats))
# Peer ports equal to the network default are elided from /peer_addresses.
184 web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues()), 'text/plain'))
185 web_root.putChild('peer_versions', WebInterface(lambda: ''.join('%s:%i ' % peer.addr + peer.other_sub_version + '\n' for peer in p2p_node.peers.itervalues()), 'text/plain'))
186 web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)))
187 web_root.putChild('recent_blocks', WebInterface(lambda: recent_blocks))
188 web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
# NOTE(review): the try/import guard around the `draw` visualization module
# (original lines ~189-191, 193, 195) is missing; the print below is the
# fallback when Pygame/PIL are unavailable.
192 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
194 print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
# Newer endpoints live under /web.
196 new_root = resource.Resource()
197 web_root.putChild('web', new_root)
# Reload the persisted 24h stats log if present.
# NOTE(review): the `stat_log = []` initializer and the try/except framing
# (original lines ~198-201, 204) are missing from this listing.
200 if os.path.exists(os.path.join(datadir_path, 'stats')):
202 with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
203 stat_log = json.loads(f.read())
205 log.err(None, 'Error loading stats:')
# Periodically append a snapshot of pool/local stats to stat_log, pruning
# entries older than 24 hours, and persist the log to disk. Scheduled every
# 5 minutes by the LoopingCall below.
206 def update_stat_log():
# Drop entries older than a day. NOTE(review): the pop/del line inside this
# loop (original ~208-209) is missing.
207 while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
210 lookbehind = 3600//net.SHARE_PERIOD
# NOTE(review): the early-return body of this guard (original ~212-213) is
# missing.
211 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
214 global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
215 (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
# Same per-miner rate accumulation as in get_local_stats.
# NOTE(review): the `for datum in datums:` header and dead-work condition
# (original 220, 222) are missing.
217 miner_hash_rates = {}
218 miner_dead_hash_rates = {}
219 datums, dt = local_rate_monitor.get_datums_in_last()
221 miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
223 miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
# Snapshot appended to the in-memory log; amounts are converted from
# satoshis to coins via *1e-8.
225 stat_log.append(dict(
227 pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
228 pool_stale_prop=global_stale_prop,
229 local_hash_rates=miner_hash_rates,
230 local_dead_hash_rates=miner_dead_hash_rates,
232 stale_shares=stale_orphan_shares + stale_doa_shares,
233 stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
234 current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
236 incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
237 outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
239 attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].max_target),
240 attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target),
241 block_value=current_work2.value['subsidy']*1e-8,
# Persist the whole log each cycle (not atomic -- unlike _atomic_write used
# for the graph database below).
244 with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
245 f.write(json.dumps(stat_log))
246 task.LoopingCall(update_stat_log).start(5*60)
247 new_root.putChild('log', WebInterface(lambda: stat_log))
# HTML page describing a single share, addressed by its hash (hex) in the
# URL. Linked to/from the Explorer index below.
249 class ShareExplorer(resource.Resource):
250 def __init__(self, share_hash):
251 self.share_hash = share_hash
252 def render_GET(self, request):
253 request.setHeader('Content-Type', 'text/html')
254 if self.share_hash not in tracker.shares:
255 return 'share not known'
256 share = tracker.shares[self.share_hash]
# Render a difficulty as "difficulty (bits=...) Work required: ...H".
258 format_bits = lambda bits: '%f (bits=%#8x) Work required: %sH' % (bitcoin_data.target_to_difficulty(bits.target), bits.bits, math.format(bitcoin_data.target_to_average_attempts(bits.target)))
# Navigation: previous share, a 100-back jump link, and children sorted by
# how many descendants they have (most-built-on first).
260 request.write('<h1>%s <a href="%x">%s</a></h1>' % (share.__class__.__name__, share.hash, p2pool_data.format_hash(share.hash)))
261 if share.previous_hash is not None:
262 request.write('<p>Previous: <a href="%x">%s</a>' % (share.previous_hash, p2pool_data.format_hash(share.previous_hash)))
263 if tracker.get_height(share.hash) >= 100:
264 jump_hash = tracker.get_nth_parent_hash(share.hash, 100)
265 if jump_hash is not None:
266 request.write(' (100 jump <a href="%x">%s</a>)' % (jump_hash, p2pool_data.format_hash(jump_hash)))
267 request.write('</p>')
268 request.write('<p>Next: %s</p>' % (', '.join('<a href="%x">%s</a>' % (next, p2pool_data.format_hash(next)) for next in sorted(tracker.reverse_shares.get(share.hash, set()), key=lambda sh: -len(tracker.reverse_shares.get(sh, set())))),))
269 request.write('<p>Verified: %s</p>' % (share.hash in tracker.verified.shares,))
# time_seen == 0 marks shares loaded from cache rather than received live.
270 request.write('<p>Time first seen: %s</p>' % (time.ctime(start_time if share.time_seen == 0 else share.time_seen),))
271 request.write('<p>Peer first received from: %s</p>' % ('%s:%i' % share.peer.addr if share.peer is not None else 'self or cache',))
273 request.write('<h2>Share data</h2>')
274 request.write('<p>Timestamp: %s (%i)</p>' % (time.ctime(share.timestamp), share.timestamp))
275 request.write('<p>Difficulty: %s</p>' % (format_bits(share.share_info['bits']),))
276 request.write('<p>Minimum difficulty: %s</p>' % (format_bits(share.share_info.get('max_bits', share.share_info['bits'])),))
277 request.write('<p>Payout script: %s</p>' % (bitcoin_data.script2_to_human(share.new_script, share.net.PARENT),))
# Donation is stored as a fraction of 65535.
278 request.write('<p>Donation: %.2f%%</p>' % (share.share_data['donation']/65535*100,))
279 request.write('<p>Stale info: %s</p>' % ({0: 'none', 253: 'had an orphan', 254: 'had a dead'}.get(share.share_data['stale_info'], 'unknown %i' % (share.share_data['stale_info'],)),))
280 request.write('<p>Nonce: %s</p>' % (cgi.escape(repr(share.share_data['nonce'])),))
282 request.write('<h2>Block header</h2>')
283 request.write('<p>Hash: <a href="%s%064x">%064x</a></p>' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash, share.header_hash))
284 request.write('<p>Version: %i</p>' % (share.header['version'],))
285 request.write('<p>Previous block: <a href="%s%064x">%064x</a></p>' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header['previous_block'], share.header['previous_block']))
286 request.write('<p>Timestamp: %s (%i)</p>' % (time.ctime(share.header['timestamp']), share.header['timestamp']))
287 request.write('<p>Difficulty: %f (bits=%#8x) Work: %sH</p>' % (bitcoin_data.target_to_difficulty(share.header['bits'].target), share.header['bits'].bits, math.format(bitcoin_data.target_to_average_attempts(share.header['bits'].target))))
288 request.write('<p>Nonce: %i</p>' % (share.header['nonce'],))
# If the full transaction list isn't attached, bound the count from the
# merkle branch length. NOTE(review): the branch-length-0 case body and the
# `else:` line (original ~292-293) are missing.
289 if share.other_txs is not None:
290 tx_count = len(share.other_txs)
291 elif len(share.merkle_link['branch']) == 0:
294 tx_count = 'between %i and %i' % (2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch']))
295 request.write('<p>Transactions: %s</p>' % (tx_count,))
296 coinbase = share.share_data['coinbase'].ljust(2, '\x00')
297 request.write('<p>Coinbase: %s %s</p>' % (cgi.escape(repr(coinbase)), coinbase.encode('hex')))
298 request.write('<p>Generation value: %.8f %s</p>' % (share.share_data['subsidy']*1e-8, net.PARENT.SYMBOL))
299 #request.write('<p>Generation txn: %32x</p>' % (share.gentx_hash,))
# Index page listing the heads and tails of the share tracker (verified and
# unverified), each linking to a ShareExplorer page; child URLs are parsed
# as hex share hashes.
302 class Explorer(resource.Resource):
303 def render_GET(self, request):
# Redirect to the trailing-slash form so relative share links resolve.
304 if not request.path.endswith('/'):
305 request.redirect(request.path + '/')
307 request.setHeader('Content-Type', 'text/html')
308 request.write('<h1>P2Pool share explorer</h1>')
310 request.write('<h2>Verified heads</h2>')
311 request.write('<ul>')
# The current best share is flagged with " BEST" in every list below.
312 for h in tracker.verified.heads:
313 request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
314 request.write('</ul>')
316 request.write('<h2>Verified tails</h2>')
317 request.write('<ul>')
318 for tail in tracker.verified.tails:
319 for h in tracker.reverse_shares.get(tail, set()):
320 request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
321 request.write('</ul>')
323 request.write('<h2>Heads</h2>')
324 request.write('<ul>')
325 for h in tracker.heads:
326 request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
327 request.write('</ul>')
329 request.write('<h2>Tails</h2>')
330 request.write('<ul>')
331 for tail in tracker.tails:
332 for h in tracker.reverse_shares.get(tail, set()):
333 request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
334 request.write('</ul>')
# Delegate /<hex hash> to a per-share page.
# NOTE(review): intervening lines (original ~335-336, 338-339, likely a
# return and an index-page guard) are missing from this listing.
337 def getChild(self, child, request):
340 return ShareExplorer(int(child, 16))
341 new_root.putChild('explorer', Explorer())
# Legacy RRD-based grapher served under /graphs.
343 grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
344 web_root.putChild('graphs', grapher.get_resource())
# NOTE(review): the `def add_point():` header (original ~345-347) is
# missing; the lines below are its body, sampled every 100 seconds by the
# LoopingCall. The guard's early-return body is also missing.
346 if tracker.get_height(current_work.value['best_share_hash']) < 720:
348 nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
349 poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
350 grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
351 task.LoopingCall(add_point).start(100)
# Feed local (and, for VIP users, per-miner) rate points to the grapher
# shortly after each pseudoshare arrives.
352 @pseudoshare_received.watch
353 def _(work, dead, user, had_vip_pass):
354 reactor.callLater(1, grapher.add_localrate_point, work, dead)
355 if user is not None and had_vip_pass:
356 reactor.callLater(1, grapher.add_localminer_point, user, work, dead)
# Newer JSON-file-backed history database for /web/graph_data, read back via
# the crash-tolerant _atomic_read helper defined at the top of the file.
358 hd_path = os.path.join(datadir_path, 'graph_db')
359 hd_data = _atomic_read(hd_path)
# NOTE(review): the `hd_obj = {}` default and the try/except framing around
# the JSON parse (original ~360, 362, 364) are missing from this listing.
361 if hd_data is not None:
363 hd_obj = json.loads(hd_data)
365 log.err(None, 'Error reading graph database:')
# Resolutions offered for every data stream: (bin count, total span in
# seconds).
366 dataview_descriptions = {
367 'last_hour': graph.DataViewDescription(150, 60*60),
368 'last_day': graph.DataViewDescription(300, 60*60*24),
369 'last_week': graph.DataViewDescription(300, 60*60*24*7),
370 'last_month': graph.DataViewDescription(300, 60*60*24*30),
371 'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
# Merge per-miner dicts by summing values, then keep only the 30 largest
# non-zero entries -- bounds the per-bin size of the miner-rate streams.
# NOTE(review): the `res = {}` initializer and `for d in dicts:` header
# (original ~374-375) are missing.
373 def combine_and_keep_largest(*dicts):
376 for k, v in d.iteritems():
377 res[k] = res.get(k, 0) + v
378 return dict((k, v) for k, v in sorted(res.iteritems(), key=lambda (k, v): v)[-30:] if v)
# Declare the tracked streams; restored from hd_obj where present.
379 hd = graph.HistoryDatabase.from_obj({
380 'local_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
381 'local_dead_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
382 'local_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
383 'local_dead_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
384 'pool_rate': graph.DataStreamDescription(True, dataview_descriptions),
385 'pool_stale_rate': graph.DataStreamDescription(True, dataview_descriptions),
386 'current_payout': graph.DataStreamDescription(True, dataview_descriptions),
387 'incoming_peers': graph.DataStreamDescription(True, dataview_descriptions),
388 'outgoing_peers': graph.DataStreamDescription(True, dataview_descriptions),
389 'miner_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
390 'miner_dead_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
# Persist the history database every 100s using the atomic writer so a
# crash mid-write can't corrupt it.
392 task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
# Record pseudoshare work into the local/per-miner streams.
# NOTE(review): `t = time.time()` and the dead/user guard lines (original
# ~395, 397, 399, 401) are missing from this listing.
393 @pseudoshare_received.watch
394 def _(work, dead, user, had_vip_pass):
396 hd.datastreams['local_hash_rate'].add_datum(t, work)
398 hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
400 hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
402 hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
# NOTE(review): the watcher's `def` header and its guard lines (original
# ~404-405, 407) are missing; these add real-share work to the share-rate
# streams.
403 @share_received.watch
406 hd.datastreams['local_share_hash_rate'].add_datum(t, work)
408 hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
# Pool-rate sampler, every 5 seconds. NOTE(review): the `def add_point():`
# header, `t = time.time()` and the guard's return body (original ~409, 411,
# 414) are missing.
410 if tracker.get_height(current_work.value['best_share_hash']) < 720:
412 nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
413 poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
415 hd.datastreams['pool_rate'].add_datum(t, poolrate)
416 hd.datastreams['pool_stale_rate'].add_datum(t, poolrate - nonstalerate)
417 hd.datastreams['current_payout'].add_datum(t, get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8)
418 hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming))
419 hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming))
420 task.LoopingCall(add_point).start(5)
# /web/graph_data/<source>/<view> -- extra path segments become arguments
# via WebInterface.getChild.
421 new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
# Static assets served from web-static next to the entry script.
423 web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))