# added broken_mode option to get_cumulative_weights
# [p2pool.git] / p2pool / web.py
1 from __future__ import division
2
3 import cgi
4 import json
5 import os
6 import sys
7 import time
8
9 from twisted.internet import reactor, task
10 from twisted.python import log
11 from twisted.web import resource, static
12
13 from bitcoin import data as bitcoin_data
14 from . import data as p2pool_data, graphs
15 from util import graph, math
16
def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received):
    # Build the twisted.web resource tree for p2pool's HTTP interface and
    # return the root resource. Everything below closes over the arguments,
    # so the handlers always see the node's live state.
    start_time = time.time()  # used by /uptime and as a fallback "first seen" time
    
    web_root = resource.Resource()
    
22     def get_rate():
23         if tracker.get_height(current_work.value['best_share_hash']) < 720:
24             return json.dumps(None)
25         return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
26             / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
27     
28     def get_users():
29         height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
30         weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256, False)
31         res = {}
32         for script in sorted(weights, key=lambda s: weights[s]):
33             res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
34         return json.dumps(res)
35     
    def get_current_scaled_txouts(scale, trunc=0):
        # Rescale the current payout map so the integer values sum to `scale`.
        # Entries that would fall below `trunc` are collapsed: their combined
        # value is awarded to ONE of them, picked by weighted lottery, so dust
        # payouts are avoided without (in expectation) changing anyone's payout.
        txouts = get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value*scale//total) for script, value in txouts.iteritems())
        if trunc > 0:
            total_random = 0
            random_set = set()
            # walk scripts in increasing payout order; everything below trunc
            # joins the lottery pool until the first entry that meets the bar
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        # floor division above can leave a shortfall vs. scale; hand the
        # remainder to one entry chosen proportionally to its value
        if sum(results.itervalues()) < int(scale):
            results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
        return results
56     
57     def get_current_payouts():
58         return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
59     
60     def get_patron_sendmany(total=None, trunc='0.01'):
61         if total is None:
62             return 'need total argument. go to patron_sendmany/<TOTAL>'
63         total = int(float(total)*1e8)
64         trunc = int(float(trunc)*1e8)
65         return json.dumps(dict(
66             (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
67             for script, value in get_current_scaled_txouts(total, trunc).iteritems()
68             if bitcoin_data.script2_to_address(script, net.PARENT) is not None
69         ))
70     
71     def get_global_stats():
72         # averaged over last hour
73         lookbehind = 3600//net.SHARE_PERIOD
74         if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
75             return None
76         
77         nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
78         stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
79         return json.dumps(dict(
80             pool_nonstale_hash_rate=nonstale_hash_rate,
81             pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
82             pool_stale_prop=stale_prop,
83         ))
84     
85     def get_local_stats():
86         lookbehind = 3600//net.SHARE_PERIOD
87         if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
88             return None
89         
90         global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
91         
92         my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
93         my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
94         my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
95         my_share_count = my_unstale_count + my_orphan_count + my_doa_count
96         my_stale_count = my_orphan_count + my_doa_count
97         
98         my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
99         
100         my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
101             for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
102             if share.hash in my_share_hashes)
103         actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
104             tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
105         share_att_s = my_work / actual_time
106         
107         miner_hash_rates = {}
108         miner_dead_hash_rates = {}
109         datums, dt = local_rate_monitor.get_datums_in_last()
110         for datum in datums:
111             miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
112             if datum['dead']:
113                 miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
114         
115         return json.dumps(dict(
116             my_hash_rates_in_last_hour=dict(
117                 note="DEPRECATED",
118                 nonstale=share_att_s,
119                 rewarded=share_att_s/(1 - global_stale_prop),
120                 actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
121             ),
122             my_share_counts_in_last_hour=dict(
123                 shares=my_share_count,
124                 unstale_shares=my_unstale_count,
125                 stale_shares=my_stale_count,
126                 orphan_stale_shares=my_orphan_count,
127                 doa_stale_shares=my_doa_count,
128             ),
129             my_stale_proportions_in_last_hour=dict(
130                 stale=my_stale_prop,
131                 orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
132                 dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
133             ),
134             miner_hash_rates=miner_hash_rates,
135             miner_dead_hash_rates=miner_dead_hash_rates,
136         ))
137     
138     def get_peer_addresses():
139         return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
140     
141     def get_uptime():
142         return json.dumps(time.time() - start_time)
143     
144     class WebInterface(resource.Resource):
145         def __init__(self, func, mime_type, args=()):
146             resource.Resource.__init__(self)
147             self.func, self.mime_type, self.args = func, mime_type, args
148         
149         def getChild(self, child, request):
150             return WebInterface(self.func, self.mime_type, self.args + (child,))
151         
152         def render_GET(self, request):
153             request.setHeader('Content-Type', self.mime_type)
154             request.setHeader('Access-Control-Allow-Origin', '*')
155             return self.func(*self.args)
156     
    # legacy top-level endpoints
    web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
    web_root.putChild('users', WebInterface(get_users, 'application/json'))
    web_root.putChild('fee', WebInterface(lambda: json.dumps(worker_fee), 'application/json'))
    web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
    web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
    web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
    web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
    web_root.putChild('peer_versions', WebInterface(lambda: ''.join('%s:%i ' % peer.addr + peer.other_sub_version + '\n' for peer in p2p_node.peers.itervalues()), 'text/plain'))
    web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)), 'application/json'))
    web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
    web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
    
    # /chain_img needs the optional draw module (Pygame + PIL); degrade gracefully without it
    try:
        from . import draw
        web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
    except ImportError:
        print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
    
    # newer endpoints live under /web/
    new_root = resource.Resource()
    web_root.putChild('web', new_root)
179     stat_log = []
180     if os.path.exists(os.path.join(datadir_path, 'stats')):
181         try:
182             with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
183                 stat_log = json.loads(f.read())
184         except:
185             log.err(None, 'Error loading stats:')
    def update_stat_log():
        # Periodic snapshot of pool/local stats, appended to stat_log and
        # persisted to <datadir>/stats; entries older than 24 hours are dropped.
        while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
            stat_log.pop(0)
        
        lookbehind = 3600//net.SHARE_PERIOD
        # too little chain history to produce meaningful hourly numbers
        if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
            return None
        
        global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
        
        # per-miner hashrates from the recent pseudoshare datums
        miner_hash_rates = {}
        miner_dead_hash_rates = {}
        datums, dt = local_rate_monitor.get_datums_in_last()
        for datum in datums:
            miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
            if datum['dead']:
                miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
        
        stat_log.append(dict(
            time=time.time(),
            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
            pool_stale_prop=global_stale_prop,
            local_hash_rates=miner_hash_rates,
            local_dead_hash_rates=miner_dead_hash_rates,
            shares=shares,
            stale_shares=stale_orphan_shares + stale_doa_shares,
            stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
            current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
            peers=dict(
                incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
            ),
            attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target),
            block_value=current_work2.value['subsidy']*1e-8,
        ))
        
        # persist the whole log each time so a restart picks up where we left off
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
    task.LoopingCall(update_stat_log).start(5*60)  # snapshot every 5 minutes
    new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
228     
229     class ShareExplorer(resource.Resource):
230         def __init__(self, share_hash):
231             self.share_hash = share_hash
232         def render_GET(self, request):
233             request.setHeader('Content-Type', 'text/html')
234             if self.share_hash not in tracker.shares:
235                 return 'share not known'
236             share = tracker.shares[self.share_hash]
237             
238             format_bits = lambda bits: '%f (bits=%#8x) Work required: %sH</p>' % (bitcoin_data.target_to_difficulty(bits.target), bits.bits, math.format(bitcoin_data.target_to_average_attempts(bits.target)))
239             
240             request.write('<h1>Share <a href="%x">%s</a></h1>' % (share.hash, p2pool_data.format_hash(share.hash)))
241             if share.previous_hash is not None:
242                 request.write('<p>Previous: <a href="%x">%s</a>' % (share.previous_hash, p2pool_data.format_hash(share.previous_hash)))
243             if tracker.get_height(share.hash) >= 100:
244                 jump_hash = tracker.get_nth_parent_hash(share.hash, 100)
245                 if jump_hash is not None:
246                     request.write(' (100 jump <a href="%x">%s</a>)' % (jump_hash, p2pool_data.format_hash(jump_hash)))
247             request.write('</p>')
248             request.write('<p>Next: %s</p>' % (', '.join('<a href="%x">%s</a>' % (next, p2pool_data.format_hash(next)) for next in sorted(tracker.reverse_shares.get(share.hash, set()), key=lambda sh: -len(tracker.reverse_shares.get(sh, set())))),))
249             request.write('<p>Verified: %s</p>' % (share.hash in tracker.verified.shares,))
250             request.write('<p>Time first seen: %s</p>' % (time.ctime(start_time if share.time_seen == 0 else share.time_seen),))
251             request.write('<p>Peer first received from: %s</p>' % ('%s:%i' % share.peer.addr if share.peer is not None else 'self or cache',))
252             
253             request.write('<h2>Share data</h2>')
254             request.write('<p>Timestamp: %s (%i)</p>' % (time.ctime(share.timestamp), share.timestamp))
255             request.write('<p>Difficulty: %s</p>' % (format_bits(share.share_info['bits']),))
256             request.write('<p>Minimum difficulty: %s</p>' % (format_bits(share.share_info.get('max_bits', share.share_info['bits'])),))
257             request.write('<p>Payout script: %s</p>' % (bitcoin_data.script2_to_human(share.new_script, share.net.PARENT),))
258             request.write('<p>Donation: %.2f%%</p>' % (share.share_data['donation']/65535*100,))
259             request.write('<p>Stale info: %s</p>' % ({0: 'none', 253: 'had an orphan', 254: 'had a dead'}.get(share.share_data['stale_info'], 'unknown %i' % (share.share_data['stale_info'],)),))
260             request.write('<p>Nonce: %s</p>' % (cgi.escape(repr(share.share_data['nonce'])),))
261             
262             request.write('<h2>Block header</h2>')
263             request.write('<p>Hash: <a href="%s%064x">%064x</a></p>' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash, share.header_hash))
264             request.write('<p>Version: %i</p>' % (share.header['version'],))
265             request.write('<p>Previous block: <a href="%s%064x">%064x</a></p>' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header['previous_block'], share.header['previous_block']))
266             request.write('<p>Timestamp: %s (%i)</p>' % (time.ctime(share.header['timestamp']), share.header['timestamp']))
267             request.write('<p>Difficulty: %f (bits=%#8x) Work: %sH</p>' % (bitcoin_data.target_to_difficulty(share.header['bits'].target), share.header['bits'].bits, math.format(bitcoin_data.target_to_average_attempts(share.header['bits'].target))))
268             request.write('<p>Nonce: %i</p>' % (share.header['nonce'],))
269             if share.other_txs is not None:
270                 tx_count = len(share.other_txs)
271             elif len(share.merkle_branch) == 0:
272                 tx_count = 1
273             else:
274                 tx_count = 'between %i and %i' % (2**len(share.merkle_branch)//2+1, 2**len(share.merkle_branch))
275             request.write('<p>Transactions: %s</p>' % (tx_count,))
276             coinbase = share.share_data['coinbase'].ljust(2, '\x00')
277             request.write('<p>Coinbase: %s %s</p>' % (cgi.escape(repr(coinbase)), coinbase.encode('hex')))
278             request.write('<p>Generation value: %.8f %s</p>' % (share.share_data['subsidy']*1e-8, net.PARENT.SYMBOL))
279             #request.write('<p>Generation txn: %32x</p>' % (share.gentx_hash,))
280             
281             return ''
    class Explorer(resource.Resource):
        # Index page listing the share chain's heads and tails; child paths
        # (hex share hashes) are dispatched to ShareExplorer via getChild.
        def render_GET(self, request):
            # canonicalize to a trailing slash so relative share links resolve
            if not request.path.endswith('/'):
                request.redirect(request.path + '/')
                return ''
            request.setHeader('Content-Type', 'text/html')
            request.write('<h1>P2Pool share explorer</h1>')
            
            request.write('<h2>Verified heads</h2>')
            request.write('<ul>')
            for h in tracker.verified.heads:
                request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
            request.write('</ul>')
            
            request.write('<h2>Verified tails</h2>')
            request.write('<ul>')
            for tail in tracker.verified.tails:
                for h in tracker.reverse_shares.get(tail, set()):
                    request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
            request.write('</ul>')
            
            request.write('<h2>Heads</h2>')
            request.write('<ul>')
            for h in tracker.heads:
                request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
            request.write('</ul>')
            
            request.write('<h2>Tails</h2>')
            request.write('<ul>')
            for tail in tracker.tails:
                for h in tracker.reverse_shares.get(tail, set()):
                    request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
            request.write('</ul>')
            
            return ''
        def getChild(self, child, request):
            # empty segment (trailing slash) means the index page itself
            if not child:
                return self
            return ShareExplorer(int(child, 16))
    new_root.putChild('explorer', Explorer())
322     
    # legacy RRD-based graphing under /graphs
    grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
    web_root.putChild('graphs', grapher.get_resource())
    def add_point():
        # Sample pool rate (and its stale portion) into the RRD grapher;
        # needs 720 shares of chain history to be meaningful.
        if tracker.get_height(current_work.value['best_share_hash']) < 720:
            return
        nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
        poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
        grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
    # NOTE(review): `add_point` is redefined further down for the newer graph
    # system; this LoopingCall captured the RRD version first, so both run.
    task.LoopingCall(add_point).start(100)
    @pseudoshare_received.watch
    def _(work, dead, user):
        # defer a second so graphing never delays share processing
        reactor.callLater(1, grapher.add_localrate_point, work, dead)
        if user is not None:
            reactor.callLater(1, grapher.add_localminer_point, user, work, dead)
337     
338     hd_path = os.path.join(datadir_path, 'graph_db')
339     hd_obj = {}
340     if os.path.exists(hd_path):
341         try:
342             hd_obj = json.loads(open(hd_path, 'rb').read())
343         except Exception:
344             log.err(None, 'Error reading graph database:')
    # resolution (number of bins) and span for each /web/graph_data view
    dataview_descriptions = {
        'last_hour': graph.DataViewDescription(150, 60*60),
        'last_day': graph.DataViewDescription(300, 60*60*24),
        'last_week': graph.DataViewDescription(300, 60*60*24*7),
        'last_month': graph.DataViewDescription(300, 60*60*24*30),
    }
    # first DataStreamDescription argument is a flag the graph module defines;
    # it differs between the local and pool streams — see util/graph.py
    hd = graph.HistoryDatabase.from_obj({
        'local_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
        'local_dead_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
        'pool_rate': graph.DataStreamDescription(True, dataview_descriptions),
        'pool_stale_rate': graph.DataStreamDescription(True, dataview_descriptions),
    }, hd_obj)
357     def _atomic_write(filename, data):
358         open(filename + '.new', 'w').write(data)
359         os.rename(filename + '.new', filename)
    # persist the graph database every 100 seconds
    task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
    @pseudoshare_received.watch
    def _(work, dead, user):
        # feed every pseudoshare into the local (and, if dead, the dead) stream
        t = time.time()
        hd.datastreams['local_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
    def add_point():
        # Sample pool rate into the history database (newer counterpart of the
        # RRD add_point above); needs 720 shares of chain history.
        if tracker.get_height(current_work.value['best_share_hash']) < 720:
            return
        nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
        poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
        t = time.time()
        hd.datastreams['pool_rate'].add_datum(t, poolrate)
        hd.datastreams['pool_stale_rate'].add_datum(t, poolrate - nonstalerate)
    task.LoopingCall(add_point).start(5)
    new_root.putChild('graph_data', WebInterface(lambda source, view: json.dumps(hd.datastreams[source].dataviews[view].get_data(time.time())), 'application/json'))
377     
    # serve bundled static assets from web-static/ next to the entry script
    web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))
    
    return web_root