# added /pings web page
# [p2pool.git] / p2pool / web.py
1 from __future__ import division
2
3 import errno
4 import json
5 import os
6 import sys
7 import time
8
9 from twisted.internet import defer, task
10 from twisted.python import log
11 from twisted.web import resource, static
12
13 import p2pool
14 from bitcoin import data as bitcoin_data
15 from . import data as p2pool_data
16 from util import deferred_resource, graph, math
17
18 def _atomic_read(filename):
19     try:
20         with open(filename, 'rb') as f:
21             return f.read()
22     except IOError, e:
23         if e.errno != errno.ENOENT:
24             raise
25     try:
26         with open(filename + '.new', 'rb') as f:
27             return f.read()
28     except IOError, e:
29         if e.errno != errno.ENOENT:
30             raise
31     return None
32
33 def _atomic_write(filename, data):
34     with open(filename + '.new', 'wb') as f:
35         f.write(data)
36         f.flush()
37         try:
38             os.fsync(f.fileno())
39         except:
40             pass
41     try:
42         os.rename(filename + '.new', filename)
43     except: # XXX windows can't overwrite
44         os.remove(filename)
45         os.rename(filename + '.new', filename)
46
def get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, pseudoshare_received, share_received, best_share_var):
    """Build and return the twisted.web resource tree for p2pool's HTTP
    interface.
    
    Legacy JSON endpoints hang directly off the returned root; newer
    machine-readable endpoints live under /web/.  Building the tree also
    starts periodic LoopingCalls that persist stats and graph history
    into datadir_path.
    """
    start_time = time.time() # served by /uptime; also the first-seen fallback in get_share
    
    web_root = resource.Resource()
    
52     def get_users():
53         height, last = tracker.get_height_and_last(best_share_var.value)
54         weights, total_weight, donation_weight = tracker.get_cumulative_weights(best_share_var.value, min(height, 720), 65535*2**256)
55         res = {}
56         for script in sorted(weights, key=lambda s: weights[s]):
57             res[bitcoin_data.script2_to_address(script, net.PARENT)] = weights[script]/total_weight
58         return res
59     
    def get_current_scaled_txouts(scale, trunc=0):
        """Scale the current payout map so its values sum to int(scale).
        
        If trunc > 0, payouts smaller than trunc are removed and their
        combined value handed to one of them, chosen randomly with
        probability proportional to payout size.  Any shortfall from the
        integer division is likewise awarded by weighted choice, so the
        final total is exactly int(scale).
        """
        txouts = get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value*scale//total) for script, value in txouts.iteritems())
        if trunc > 0:
            # Walk payouts smallest-first, collecting everything below the
            # truncation threshold.
            total_random = 0
            random_set = set()
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                # Give the whole truncated amount to one weighted-random winner.
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        if sum(results.itervalues()) < int(scale):
            # Floor division lost a few units; top up one weighted-random recipient.
            results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
        return results
    
81     def get_patron_sendmany(total=None, trunc='0.01'):
82         if total is None:
83             return 'need total argument. go to patron_sendmany/<TOTAL>'
84         total = int(float(total)*1e8)
85         trunc = int(float(trunc)*1e8)
86         return json.dumps(dict(
87             (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
88             for script, value in get_current_scaled_txouts(total, trunc).iteritems()
89             if bitcoin_data.script2_to_address(script, net.PARENT) is not None
90         ))
91     
92     def get_local_rates():
93         miner_hash_rates = {}
94         miner_dead_hash_rates = {}
95         datums, dt = local_rate_monitor.get_datums_in_last()
96         for datum in datums:
97             miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
98             if datum['dead']:
99                 miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
100         return miner_hash_rates, miner_dead_hash_rates
101     
102     def get_global_stats():
103         # averaged over last hour
104         lookbehind = 3600//net.SHARE_PERIOD
105         if tracker.get_height(best_share_var.value) < lookbehind:
106             return None
107         
108         nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, lookbehind)
109         stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, lookbehind)
110         return dict(
111             pool_nonstale_hash_rate=nonstale_hash_rate,
112             pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
113             pool_stale_prop=stale_prop,
114             min_difficulty=bitcoin_data.target_to_difficulty(tracker.items[best_share_var.value].max_target),
115         )
116     
    def get_local_stats():
        """Statistics about this node's own shares, miners and peers over
        roughly the last hour, or None while the chain is shorter than that."""
        lookbehind = 3600//net.SHARE_PERIOD
        if tracker.get_height(best_share_var.value) < lookbehind:
            return None
        
        global_stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, lookbehind)
        
        # Count our shares found on the chain; orphan/doa counts come from
        # the stale_info each share carries in its share_data.
        my_unstale_count = sum(1 for share in tracker.get_chain(best_share_var.value, lookbehind) if share.hash in my_share_hashes)
        my_orphan_count = sum(1 for share in tracker.get_chain(best_share_var.value, lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 'orphan')
        my_doa_count = sum(1 for share in tracker.get_chain(best_share_var.value, lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 'doa')
        my_share_count = my_unstale_count + my_orphan_count + my_doa_count
        my_stale_count = my_orphan_count + my_doa_count
        
        my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
        
        # Our attempted work over the window divided by the window's
        # wall-clock span gives our nonstale rate in attempts/second.
        my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
            for share in tracker.get_chain(best_share_var.value, lookbehind - 1)
            if share.hash in my_share_hashes)
        actual_time = (tracker.items[best_share_var.value].timestamp -
            tracker.items[tracker.get_nth_parent_hash(best_share_var.value, lookbehind - 1)].timestamp)
        share_att_s = my_work / actual_time
        
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
        
        return dict(
            my_hash_rates_in_last_hour=dict(
                note="DEPRECATED",
                nonstale=share_att_s,
                rewarded=share_att_s/(1 - global_stale_prop),
                actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
            ),
            my_share_counts_in_last_hour=dict(
                shares=my_share_count,
                unstale_shares=my_unstale_count,
                stale_shares=my_stale_count,
                orphan_stale_shares=my_orphan_count,
                doa_stale_shares=my_doa_count,
            ),
            my_stale_proportions_in_last_hour=dict(
                stale=my_stale_prop,
                orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
            ),
            miner_hash_rates=miner_hash_rates,
            miner_dead_hash_rates=miner_dead_hash_rates,
            efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
            efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
            peers=dict(
                incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
            ),
            shares=dict(
                total=shares,
                orphan=stale_orphan_shares,
                dead=stale_doa_shares,
            ),
            uptime=time.time() - start_time,
            block_value=bitcoind_work.value['subsidy']*1e-8,
            warnings=p2pool_data.get_warnings(tracker, best_share_var.value, net),
        )
    
    class WebInterface(deferred_resource.DeferredResource):
        """Resource that answers GET by serving func(*args).
        
        Each URL path segment below the resource is curried into args, so
        e.g. /patron_sendmany/1.0 calls the wrapped function with '1.0'.
        Results are JSON-encoded when mime_type is 'application/json',
        otherwise returned as-is.  func may return a Deferred.
        """
        def __init__(self, func, mime_type='application/json', args=()):
            deferred_resource.DeferredResource.__init__(self)
            self.func, self.mime_type, self.args = func, mime_type, args
        
        def getChild(self, child, request):
            # Curry the path segment into the argument list.
            return WebInterface(self.func, self.mime_type, self.args + (child,))
        
        @defer.inlineCallbacks
        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
            request.setHeader('Access-Control-Allow-Origin', '*') # allow cross-origin dashboards
            res = yield self.func(*self.args)
            defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res)
    
    # Legacy JSON endpoints at the web root (newer ones live under /web/).
    web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, 720)/(1-p2pool_data.get_average_stale_prop(tracker, best_share_var.value, 720)))) # pool rate over 720 shares, corrected for stales
    web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(tracker.items[best_share_var.value].max_target)))
    web_root.putChild('users', WebInterface(get_users))
    web_root.putChild('fee', WebInterface(lambda: worker_fee))
    web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems())))
    web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats))
    web_root.putChild('local_stats', WebInterface(get_local_stats))
    web_root.putChild('peer_addresses', WebInterface(lambda: ['%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port) for peer in p2p_node.peers.itervalues()]))
    # /pings: ping each connected peer three times and report the smallest
    # round-trip time in milliseconds, keyed by 'host:port'.
    # NOTE(review): a failed ping yields None, and in Python 2 None sorts
    # below every number, so a single failure reports None for that peer
    # — confirm this is intended.
    web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue(
        dict([(a, (yield b)) for a, b in
            [(
                '%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port),
                defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
                    min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)])
                ))()
            ) for peer in list(p2p_node.peers.itervalues())]
        ])
    ))))
    web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in p2p_node.peers.itervalues())))
    web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)))
    web_root.putChild('recent_blocks', WebInterface(lambda: [dict(ts=s.timestamp, hash='%064x' % s.header_hash) for s in tracker.get_chain(best_share_var.value, 24*60*60//net.SHARE_PERIOD) if s.pow_hash <= s.header['bits'].target])) # shares in the last day that met the block target
    web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
    web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(tracker, best_share_var.value, 720, rates=True)))
    
219     new_root = resource.Resource()
220     web_root.putChild('web', new_root)
221     
222     stat_log = []
223     if os.path.exists(os.path.join(datadir_path, 'stats')):
224         try:
225             with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
226                 stat_log = json.loads(f.read())
227         except:
228             log.err(None, 'Error loading stats:')
    def update_stat_log():
        """Drop stat_log entries older than a day, append a fresh snapshot
        of pool/miner statistics, and persist the log to disk.  Skips
        sampling while the chain is shorter than an hour of shares."""
        while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
            stat_log.pop(0)
        
        lookbehind = 3600//net.SHARE_PERIOD
        if tracker.get_height(best_share_var.value) < lookbehind:
            return None
        
        global_stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, lookbehind)
        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        
        stat_log.append(dict(
            time=time.time(),
            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, lookbehind)/(1-global_stale_prop),
            pool_stale_prop=global_stale_prop,
            local_hash_rates=miner_hash_rates,
            local_dead_hash_rates=miner_dead_hash_rates,
            shares=shares,
            stale_shares=stale_orphan_shares + stale_doa_shares,
            stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
            current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
            peers=dict(
                incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
            ),
            attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.items[best_share_var.value].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(bitcoind_work.value['bits'].target),
            block_value=bitcoind_work.value['subsidy']*1e-8,
        ))
        
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
    task.LoopingCall(update_stat_log).start(5*60) # snapshot every five minutes
    new_root.putChild('log', WebInterface(lambda: stat_log))
    
    def get_share(share_hash_str):
        """Detailed JSON-friendly description of one share, looked up by
        hex hash; returns None if the share is unknown."""
        if int(share_hash_str, 16) not in tracker.items:
            return None
        share = tracker.items[int(share_hash_str, 16)]
        
        return dict(
            parent='%064x' % share.previous_hash,
            children=['%064x' % x for x in sorted(tracker.reverse.get(share.hash, set()), key=lambda sh: -len(tracker.reverse.get(sh, set())))], # sorted from most children to least children
            local=dict(
                verified=share.hash in tracker.verified.items,
                time_first_seen=start_time if share.time_seen == 0 else share.time_seen, # 0 means it predates this run
                peer_first_received_from=share.peer.addr if share.peer is not None else None,
            ),
            share_data=dict(
                timestamp=share.timestamp,
                target=share.target,
                max_target=share.max_target,
                payout_address=bitcoin_data.script2_to_address(share.new_script, net.PARENT),
                donation=share.share_data['donation']/65535, # stored as a fraction of 65535
                stale_info=share.share_data['stale_info'],
                nonce=share.share_data['nonce'],
                desired_version=share.share_data['desired_version'],
            ),
            block=dict(
                hash='%064x' % share.header_hash,
                header=dict(
                    version=share.header['version'],
                    previous_block='%064x' % share.header['previous_block'],
                    merkle_root='%064x' % share.header['merkle_root'],
                    timestamp=share.header['timestamp'],
                    target=share.header['bits'].target,
                    nonce=share.header['nonce'],
                ),
                gentx=dict(
                    hash='%064x' % share.gentx_hash,
                    coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
                    value=share.share_data['subsidy']*1e-8,
                ),
                # Exact count when we hold the transactions; otherwise a
                # [min, max] range implied by the merkle branch length.
                txn_count_range=[len(share.other_txs), len(share.other_txs)] if share.other_txs is not None else 1 if len(share.merkle_link['branch']) == 0 else [2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch'])],
            ),
        )
306     new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
307     new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in tracker.heads]))
308     new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in tracker.verified.heads]))
309     new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in tracker.tails for x in tracker.reverse.get(t, set())]))
310     new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in tracker.verified.tails for x in tracker.verified.reverse.get(t, set())]))
311     new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % best_share_var.value))
312     new_root.putChild('currency_info', WebInterface(lambda: dict(
313         symbol=net.PARENT.SYMBOL,
314         block_explorer_url_prefix=net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
315         address_explorer_url_prefix=net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
316     )))
317     new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
318     
    # Load the persisted graph database (written back by the LoopingCall
    # below via _atomic_write); fall back to empty if missing or corrupt.
    hd_path = os.path.join(datadir_path, 'graph_db')
    hd_data = _atomic_read(hd_path)
    hd_obj = {}
    if hd_data is not None:
        try:
            hd_obj = json.loads(hd_data)
        except Exception:
            log.err(None, 'Error reading graph database:')
    # Resolutions kept for every data stream; arguments are presumably
    # (bin_count, total_width_in_seconds) — confirm against graph.DataViewDescription.
    dataview_descriptions = {
        'last_hour': graph.DataViewDescription(150, 60*60),
        'last_day': graph.DataViewDescription(300, 60*60*24),
        'last_week': graph.DataViewDescription(300, 60*60*24*7),
        'last_month': graph.DataViewDescription(300, 60*60*24*30),
        'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
    }
    def build_pool_rates(ds_name, ds_desc, dv_name, dv_desc, obj):
        """default_func for the 'pool_rates' stream: synthesize its initial
        DataView from the legacy 'pool_rate'/'pool_stale_rate' streams of
        an old on-disk graph database (obj) — evidently a migration path
        so upgraded nodes keep their history.  Starts empty without old data.
        """
        if not obj:
            last_bin_end = 0
            bins = dv_desc.bin_count*[{}]
        else:
            pool_rate = obj['pool_rate'][dv_name]
            pool_stale_rate = obj['pool_stale_rate'][dv_name]
            last_bin_end = max(pool_rate['last_bin_end'], pool_stale_rate['last_bin_end'])
            bins = dv_desc.bin_count*[{}]
            def get_value(obj, t):
                # Average value and sample count of the bin containing time
                # t; (None, 0) when the bin is out of range or empty.
                n = int((obj['last_bin_end'] - t)/dv_desc.bin_width)
                if n < 0 or n >= dv_desc.bin_count:
                    return None, 0
                total, count = obj['bins'][n].get('null', [0, 0])
                if count == 0:
                    return None, 0
                return total/count, count
            def get_bin(t):
                # Combine total and stale rates at time t into the
                # good/bad/null sub-totals of the multivalue stream.
                total, total_count = get_value(pool_rate, t)
                bad, bad_count = get_value(pool_stale_rate, t)
                if total is None or bad is None:
                    return {}
                count = int((total_count+bad_count)/2+1/2) # rounded mean of the two sample counts (true division is in effect)
                return dict(good=[(total-bad)*count, count], bad=[bad*count, count], null=[0, count])
            bins = [get_bin(last_bin_end - (i+1/2)*dv_desc.bin_width) for i in xrange(dv_desc.bin_count)] # sample each bin at its midpoint
        return graph.DataView(dv_desc, ds_desc, last_bin_end, bins)
    # History database backing /web/graph_data.  multivalues streams hold
    # per-key dicts (e.g. per-miner rates); is_gauge=False presumably marks
    # accumulating rate streams — confirm against util.graph.
    hd = graph.HistoryDatabase.from_obj({
        'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True, default_func=build_pool_rates),
        'current_payout': graph.DataStreamDescription(dataview_descriptions),
        'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
        'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
        'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
        'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'desired_versions': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True),
    }, hd_obj)
    # Checkpoint the graph database to disk every 100 seconds.
    task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
    @pseudoshare_received.watch
    def _(work, dead, user):
        # Record each pseudoshare in the local streams and, when a
        # username is known, the per-miner streams.
        t = time.time()
        hd.datastreams['local_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
        if user is not None:
            hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
            if dead:
                hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
    @share_received.watch
    def _(work, dead):
        # Record shares found locally; dead ones go in a separate stream.
        t = time.time()
        hd.datastreams['local_share_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
    def add_point():
        """Sample pool-wide statistics into the graph database; scheduled
        every 5 seconds, but skipped until the chain reaches 720 shares."""
        if tracker.get_height(best_share_var.value) < 720:
            return
        t = time.time()
        hd.datastreams['pool_rates'].add_datum(t, p2pool_data.get_stale_counts(tracker, best_share_var.value, 720, rates=True))
        current_txouts = get_current_txouts()
        hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8)
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, net.PARENT), amount) for script, amount in current_txouts.iteritems())
        # Per-miner expected payouts, restricted to miners currently submitting work.
        hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address))
        hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming))
        hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming))
        
        # Distribution of desired share versions over the last 720 shares,
        # as fractions of the total.
        vs = p2pool_data.get_desired_version_counts(tracker, best_share_var.value, 720)
        vs_total = sum(vs.itervalues())
        hd.datastreams['desired_versions'].add_datum(t, dict((str(k), v/vs_total) for k, v in vs.iteritems()))
    task.LoopingCall(add_point).start(5)
    new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
    
    # Static dashboard files served from web-static/ next to the entry script.
    web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))
    
    return web_root