fixes for "fixes to web"
[p2pool.git] / p2pool / web.py
from __future__ import division

import cgi
import errno
import json
import os
import sys
import time

from twisted.internet import reactor, task
from twisted.python import log
from twisted.web import resource, static

from bitcoin import data as bitcoin_data
from . import data as p2pool_data, graphs
from util import graph, math

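# Small helpers for durable on-disk state: _atomic_write writes to a temporary
# '<filename>.new' file and renames it into place, and _atomic_read falls back
# to the '.new' file if a previous write was interrupted before the rename.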
def _atomic_read(filename):
    try:
        with open(filename, 'rb') as f:
            return f.read()
    except IOError, e:
        if e.errno != errno.ENOENT:
            raise
    try:
        with open(filename + '.new', 'rb') as f:
            return f.read()
    except IOError, e:
        if e.errno != errno.ENOENT:
            raise
    return None

def _atomic_write(filename, data):
    with open(filename + '.new', 'wb') as f:
        f.write(data)
        f.flush()
        try:
            os.fsync(f.fileno())
        except:
            pass
    try:
        os.rename(filename + '.new', filename)
    except os.error: # windows can't overwrite
        os.remove(filename)
        os.rename(filename + '.new', filename)

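# Builds the Twisted resource tree for p2pool's built-in web server. Everything
# below closes over the node state passed in here (share tracker, current work,
# peer node, rate monitors, payout info, ...).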
def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received, share_received):
    start_time = time.time()

    web_root = resource.Resource()

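    # Maps each payout script (rendered as a human-readable address/script) to
    # its share of the current payout, weighted over up to the last 720 shares.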
    def get_users():
        height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256, False)
        res = {}
        for script in sorted(weights, key=lambda s: weights[s]):
            res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
        return res # WebInterface JSON-encodes this; encoding here too would double-encode

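    # Scales the current payouts so they sum to 'scale'. Outputs smaller than
    # 'trunc' are rolled together and given to one weighted-random winner
    # (avoiding dust), and any rounding shortfall is added to one weighted pick.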
    def get_current_scaled_txouts(scale, trunc=0):
        txouts = get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value*scale//total) for script, value in txouts.iteritems())
        if trunc > 0:
            total_random = 0
            random_set = set()
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        if sum(results.itervalues()) < int(scale):
            results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
        return results

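    # Returns a sendmany-style mapping (address -> amount in coins) for donating
    # 'total' coins to the pool's current miners; served as /patron_sendmany/<TOTAL>,
    # with an optional second path component overriding the 0.01 dust cutoff.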
    def get_patron_sendmany(total=None, trunc='0.01'):
        if total is None:
            return 'need total argument. go to patron_sendmany/<TOTAL>'
        total = int(float(total)*1e8)
        trunc = int(float(trunc)*1e8)
        return json.dumps(dict(
            (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
            for script, value in get_current_scaled_txouts(total, trunc).iteritems()
            if bitcoin_data.script2_to_address(script, net.PARENT) is not None
        )) # served as text/plain, so encode here rather than relying on WebInterface

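    # Pool-wide hash rate and stale proportion; returns None until a full hour
    # of shares is available on the chain.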
    def get_global_stats():
        # averaged over last hour
        lookbehind = 3600//net.SHARE_PERIOD
        if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
            return None

        nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
        stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
        return dict(
            pool_nonstale_hash_rate=nonstale_hash_rate,
            pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
            pool_stale_prop=stale_prop,
        )

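    # This node's statistics over the last hour: its share counts and stale
    # proportions, per-miner hash rates from the local rate monitor, and an
    # efficiency estimate relative to the pool-wide stale proportion.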
    def get_local_stats():
        lookbehind = 3600//net.SHARE_PERIOD
        if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
            return None

        global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)

        my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
        my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253) # stale_info 253: had an orphan
        my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254) # stale_info 254: had a dead
        my_share_count = my_unstale_count + my_orphan_count + my_doa_count
        my_stale_count = my_orphan_count + my_doa_count

        my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None

        my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
            for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
            if share.hash in my_share_hashes)
        actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
            tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
        share_att_s = my_work / actual_time

        miner_hash_rates = {}
        miner_dead_hash_rates = {}
        datums, dt = local_rate_monitor.get_datums_in_last()
        for datum in datums:
            miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
            if datum['dead']:
                miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt

        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()

        return dict(
            my_hash_rates_in_last_hour=dict(
                note="DEPRECATED",
                nonstale=share_att_s,
                rewarded=share_att_s/(1 - global_stale_prop),
                actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
            ),
            my_share_counts_in_last_hour=dict(
                shares=my_share_count,
                unstale_shares=my_unstale_count,
                stale_shares=my_stale_count,
                orphan_stale_shares=my_orphan_count,
                doa_stale_shares=my_doa_count,
            ),
            my_stale_proportions_in_last_hour=dict(
                stale=my_stale_prop,
                orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
            ),
            miner_hash_rates=miner_hash_rates,
            miner_dead_hash_rates=miner_dead_hash_rates,
            efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
        )

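    # Thin Twisted resource: calls 'func' on each GET, JSON-encodes the result
    # when the MIME type is application/json, and turns any extra URL path
    # components into positional arguments for 'func'.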
    class WebInterface(resource.Resource):
        def __init__(self, func, mime_type='application/json', args=()):
            resource.Resource.__init__(self)
            self.func, self.mime_type, self.args = func, mime_type, args

        def getChild(self, child, request):
            return WebInterface(self.func, self.mime_type, self.args + (child,))

        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
            request.setHeader('Access-Control-Allow-Origin', '*')
            res = self.func(*self.args)
            return json.dumps(res) if self.mime_type == 'application/json' else res

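    # Top-level endpoints; newer machine-readable endpoints are grouped under
    # the /web resource created below.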
    web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)/(1-p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))))
    web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(tracker.shares[current_work.value['best_share_hash']].max_target)))
    web_root.putChild('users', WebInterface(get_users))
    web_root.putChild('fee', WebInterface(lambda: worker_fee))
    web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems())))
    web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats))
    web_root.putChild('local_stats', WebInterface(get_local_stats))
    web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues()), 'text/plain'))
    web_root.putChild('peer_versions', WebInterface(lambda: ''.join('%s:%i ' % peer.addr + peer.other_sub_version + '\n' for peer in p2p_node.peers.itervalues()), 'text/plain'))
    web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)))
    web_root.putChild('recent_blocks', WebInterface(lambda: recent_blocks))
    web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))

    try:
        from . import draw
        web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
    except ImportError:
        print "Install Pygame and PIL to enable visualizations! Visualizations disabled."

    new_root = resource.Resource()
    web_root.putChild('web', new_root)

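    # Rolling 24-hour statistics log, sampled every five minutes, persisted to
    # '<datadir>/stats' across restarts and served at /web/log.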
    stat_log = []
    if os.path.exists(os.path.join(datadir_path, 'stats')):
        try:
            with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                stat_log = json.loads(f.read())
        except:
            log.err(None, 'Error loading stats:')
    def update_stat_log():
        while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
            stat_log.pop(0)

        lookbehind = 3600//net.SHARE_PERIOD
        if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
            return None

        global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()

        miner_hash_rates = {}
        miner_dead_hash_rates = {}
        datums, dt = local_rate_monitor.get_datums_in_last()
        for datum in datums:
            miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
            if datum['dead']:
                miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt

        stat_log.append(dict(
            time=time.time(),
            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
            pool_stale_prop=global_stale_prop,
            local_hash_rates=miner_hash_rates,
            local_dead_hash_rates=miner_dead_hash_rates,
            shares=shares,
            stale_shares=stale_orphan_shares + stale_doa_shares,
            stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
            current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
            peers=dict(
                incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
            ),
            attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target),
            block_value=current_work2.value['subsidy']*1e-8,
        ))

        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
    task.LoopingCall(update_stat_log).start(5*60)
    new_root.putChild('log', WebInterface(lambda: stat_log))

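    # Minimal HTML share-chain explorer: /web/explorer/ lists chain heads and
    # tails, and /web/explorer/<share hash in hex> shows one share in detail.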
    class ShareExplorer(resource.Resource):
        def __init__(self, share_hash):
            resource.Resource.__init__(self) # initialize Resource state (children dict)
            self.share_hash = share_hash
        def render_GET(self, request):
            request.setHeader('Content-Type', 'text/html')
            if self.share_hash not in tracker.shares:
                return 'share not known'
            share = tracker.shares[self.share_hash]

            format_bits = lambda bits: '%f (bits=%#8x) Work required: %sH' % (bitcoin_data.target_to_difficulty(bits.target), bits.bits, math.format(bitcoin_data.target_to_average_attempts(bits.target)))

            request.write('<h1>%s <a href="%x">%s</a></h1>' % (share.__class__.__name__, share.hash, p2pool_data.format_hash(share.hash)))
            if share.previous_hash is not None:
                request.write('<p>Previous: <a href="%x">%s</a>' % (share.previous_hash, p2pool_data.format_hash(share.previous_hash)))
            if tracker.get_height(share.hash) >= 100:
                jump_hash = tracker.get_nth_parent_hash(share.hash, 100)
                if jump_hash is not None:
                    request.write(' (100 jump <a href="%x">%s</a>)' % (jump_hash, p2pool_data.format_hash(jump_hash)))
            request.write('</p>')
            request.write('<p>Next: %s</p>' % (', '.join('<a href="%x">%s</a>' % (next, p2pool_data.format_hash(next)) for next in sorted(tracker.reverse_shares.get(share.hash, set()), key=lambda sh: -len(tracker.reverse_shares.get(sh, set())))),))
            request.write('<p>Verified: %s</p>' % (share.hash in tracker.verified.shares,))
            request.write('<p>Time first seen: %s</p>' % (time.ctime(start_time if share.time_seen == 0 else share.time_seen),))
            request.write('<p>Peer first received from: %s</p>' % ('%s:%i' % share.peer.addr if share.peer is not None else 'self or cache',))

            request.write('<h2>Share data</h2>')
            request.write('<p>Timestamp: %s (%i)</p>' % (time.ctime(share.timestamp), share.timestamp))
            request.write('<p>Difficulty: %s</p>' % (format_bits(share.share_info['bits']),))
            request.write('<p>Minimum difficulty: %s</p>' % (format_bits(share.share_info.get('max_bits', share.share_info['bits'])),))
            request.write('<p>Payout script: %s</p>' % (bitcoin_data.script2_to_human(share.new_script, share.net.PARENT),))
            request.write('<p>Donation: %.2f%%</p>' % (share.share_data['donation']/65535*100,))
            request.write('<p>Stale info: %s</p>' % ({0: 'none', 253: 'had an orphan', 254: 'had a dead'}.get(share.share_data['stale_info'], 'unknown %i' % (share.share_data['stale_info'],)),))
            request.write('<p>Nonce: %s</p>' % (cgi.escape(repr(share.share_data['nonce'])),))

            request.write('<h2>Block header</h2>')
            request.write('<p>Hash: <a href="%s%064x">%064x</a></p>' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash, share.header_hash))
            request.write('<p>Version: %i</p>' % (share.header['version'],))
            request.write('<p>Previous block: <a href="%s%064x">%064x</a></p>' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header['previous_block'], share.header['previous_block']))
            request.write('<p>Timestamp: %s (%i)</p>' % (time.ctime(share.header['timestamp']), share.header['timestamp']))
            request.write('<p>Difficulty: %f (bits=%#8x) Work: %sH</p>' % (bitcoin_data.target_to_difficulty(share.header['bits'].target), share.header['bits'].bits, math.format(bitcoin_data.target_to_average_attempts(share.header['bits'].target))))
            request.write('<p>Nonce: %i</p>' % (share.header['nonce'],))
            if share.other_txs is not None:
                tx_count = len(share.other_txs)
            elif len(share.merkle_link['branch']) == 0:
                tx_count = 1
            else:
                tx_count = 'between %i and %i' % (2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch']))
            request.write('<p>Transactions: %s</p>' % (tx_count,))
            coinbase = share.share_data['coinbase'].ljust(2, '\x00')
            request.write('<p>Coinbase: %s %s</p>' % (cgi.escape(repr(coinbase)), coinbase.encode('hex')))
            request.write('<p>Generation value: %.8f %s</p>' % (share.share_data['subsidy']*1e-8, net.PARENT.SYMBOL))
            #request.write('<p>Generation txn: %32x</p>' % (share.gentx_hash,))

            return ''
    class Explorer(resource.Resource):
        def render_GET(self, request):
            if not request.path.endswith('/'):
                request.redirect(request.path + '/')
                return ''
            request.setHeader('Content-Type', 'text/html')
            request.write('<h1>P2Pool share explorer</h1>')

            request.write('<h2>Verified heads</h2>')
            request.write('<ul>')
            for h in tracker.verified.heads:
                request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
            request.write('</ul>')

            request.write('<h2>Verified tails</h2>')
            request.write('<ul>')
            for tail in tracker.verified.tails:
                for h in tracker.reverse_shares.get(tail, set()):
                    request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
            request.write('</ul>')

            request.write('<h2>Heads</h2>')
            request.write('<ul>')
            for h in tracker.heads:
                request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
            request.write('</ul>')

            request.write('<h2>Tails</h2>')
            request.write('<ul>')
            for tail in tracker.tails:
                for h in tracker.reverse_shares.get(tail, set()):
                    request.write('<li><a href="%x">%s%s</a></li>' % (h, p2pool_data.format_hash(h), ' BEST' if h == current_work.value['best_share_hash'] else ''))
            request.write('</ul>')

            return ''
        def getChild(self, child, request):
            if not child:
                return self
            return ShareExplorer(int(child, 16))
    new_root.putChild('explorer', Explorer())

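    # RRD-based graphs (graphs.Grapher) served under /graphs; the JSON history
    # database set up below exposes similar data under /web/graph_data.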
    grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
    web_root.putChild('graphs', grapher.get_resource())
    def add_point():
        if tracker.get_height(current_work.value['best_share_hash']) < 720:
            return
        nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
        poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
        grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
    task.LoopingCall(add_point).start(100)
    @pseudoshare_received.watch
    def _(work, dead, user, had_vip_pass):
        reactor.callLater(1, grapher.add_localrate_point, work, dead)
        if user is not None and had_vip_pass:
            reactor.callLater(1, grapher.add_localminer_point, user, work, dead)

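    # In-process history database (util.graph.HistoryDatabase) with hour/day/
    # week/month/year views of local and pool rates, payout and peer counts,
    # persisted to '<datadir>/graph_db' every 100 seconds via _atomic_write.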
    hd_path = os.path.join(datadir_path, 'graph_db')
    hd_data = _atomic_read(hd_path)
    hd_obj = {}
    if hd_data is not None:
        try:
            hd_obj = json.loads(hd_data)
        except Exception:
            log.err(None, 'Error reading graph database:')
    dataview_descriptions = {
        'last_hour': graph.DataViewDescription(150, 60*60),
        'last_day': graph.DataViewDescription(300, 60*60*24),
        'last_week': graph.DataViewDescription(300, 60*60*24*7),
        'last_month': graph.DataViewDescription(300, 60*60*24*30),
        'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
    }
    def combine_and_keep_largest(*dicts):
        res = {}
        for d in dicts:
            for k, v in d.iteritems():
                res[k] = res.get(k, 0) + v
        return dict((k, v) for k, v in sorted(res.iteritems(), key=lambda (k, v): v)[-30:] if v)
    hd = graph.HistoryDatabase.from_obj({
        'local_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
        'local_dead_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
        'local_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
        'local_dead_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
        'pool_rate': graph.DataStreamDescription(True, dataview_descriptions),
        'pool_stale_rate': graph.DataStreamDescription(True, dataview_descriptions),
        'current_payout': graph.DataStreamDescription(True, dataview_descriptions),
        'incoming_peers': graph.DataStreamDescription(True, dataview_descriptions),
        'outgoing_peers': graph.DataStreamDescription(True, dataview_descriptions),
        'miner_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
        'miner_dead_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
    }, hd_obj)
    task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
    @pseudoshare_received.watch
    def _(work, dead, user, had_vip_pass):
        t = time.time()
        hd.datastreams['local_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
        if user is not None:
            hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
            if dead:
                hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
    @share_received.watch
    def _(work, dead):
        t = time.time()
        hd.datastreams['local_share_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
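    # This add_point feeds the history database; it is separate from the
    # grapher-based add_point above (each LoopingCall already captured its own
    # function object, so rebinding the name here does not affect the first one).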
    def add_point():
        if tracker.get_height(current_work.value['best_share_hash']) < 720:
            return
        nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
        poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
        t = time.time()
        hd.datastreams['pool_rate'].add_datum(t, poolrate)
        hd.datastreams['pool_stale_rate'].add_datum(t, poolrate - nonstalerate)
        hd.datastreams['current_payout'].add_datum(t, get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8)
        hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming))
        hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming))
    task.LoopingCall(add_point).start(5)
    new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))

    web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))

    return web_root