fixed share explorer moved message
[p2pool.git] / p2pool / web.py
1 from __future__ import division
2
3 import errno
4 import json
5 import os
6 import sys
7 import time
8
9 from twisted.internet import task
10 from twisted.python import log
11 from twisted.web import resource, static
12
13 from bitcoin import data as bitcoin_data
14 from . import data as p2pool_data
15 from util import graph, math
16
17 def _atomic_read(filename):
18     try:
19         with open(filename, 'rb') as f:
20             return f.read()
21     except IOError, e:
22         if e.errno != errno.ENOENT:
23             raise
24     try:
25         with open(filename + '.new', 'rb') as f:
26             return f.read()
27     except IOError, e:
28         if e.errno != errno.ENOENT:
29             raise
30     return None
31
32 def _atomic_write(filename, data):
33     with open(filename + '.new', 'wb') as f:
34         f.write(data)
35         f.flush()
36         try:
37             os.fsync(f.fileno())
38         except:
39             pass
40     try:
41         os.rename(filename + '.new', filename)
42     except os.error: # windows can't overwrite
43         os.remove(filename)
44         os.rename(filename + '.new', filename)
45
def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received, share_received):
    """Build and return the root twisted.web Resource for p2pool's HTTP interface.
    
    Wires up the top-level JSON/plaintext endpoints (rate, users, stats, ...),
    a '/web' subtree (share data, stat log, graph data) and '/static' file
    serving, and starts the periodic stat/graph collection tasks.
    """
    start_time = time.time()  # used by /uptime and as a fallback for share.time_seen == 0
    
    web_root = resource.Resource()
51     def get_users():
52         height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
53         weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
54         res = {}
55         for script in sorted(weights, key=lambda s: weights[s]):
56             res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
57         return json.dumps(res)
58     
    def get_current_scaled_txouts(scale, trunc=0):
        """Scale the current payout map so its values sum to int(scale).
        
        Each script's payout is scaled proportionally (floor division). If
        trunc > 0, every entry smaller than trunc is removed and their combined
        amount is awarded whole to one of them, picked by weighted random
        choice. Any rounding shortfall versus int(scale) is added to a
        weighted-randomly chosen remaining entry.
        """
        txouts = get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value*scale//total) for script, value in txouts.iteritems())
        if trunc > 0:
            # collect all entries below the truncation threshold, smallest first
            total_random = 0
            random_set = set()
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                # one small entry wins the whole truncated pot
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        if sum(results.itervalues()) < int(scale):
            # floor division above can leave a shortfall; assign it by weighted choice
            results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
        return results
79     
80     def get_patron_sendmany(total=None, trunc='0.01'):
81         if total is None:
82             return 'need total argument. go to patron_sendmany/<TOTAL>'
83         total = int(float(total)*1e8)
84         trunc = int(float(trunc)*1e8)
85         return dict(
86             (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
87             for script, value in get_current_scaled_txouts(total, trunc).iteritems()
88             if bitcoin_data.script2_to_address(script, net.PARENT) is not None
89         )
90     
91     def get_global_stats():
92         # averaged over last hour
93         lookbehind = 3600//net.SHARE_PERIOD
94         if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
95             return None
96         
97         nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
98         stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
99         return dict(
100             pool_nonstale_hash_rate=nonstale_hash_rate,
101             pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
102             pool_stale_prop=stale_prop,
103         )
104     
105     def get_local_stats():
106         lookbehind = 3600//net.SHARE_PERIOD
107         if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
108             return None
109         
110         global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
111         
112         my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
113         my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
114         my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
115         my_share_count = my_unstale_count + my_orphan_count + my_doa_count
116         my_stale_count = my_orphan_count + my_doa_count
117         
118         my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
119         
120         my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
121             for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
122             if share.hash in my_share_hashes)
123         actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
124             tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
125         share_att_s = my_work / actual_time
126         
127         miner_hash_rates = {}
128         miner_dead_hash_rates = {}
129         datums, dt = local_rate_monitor.get_datums_in_last()
130         for datum in datums:
131             miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
132             if datum['dead']:
133                 miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
134         
135         (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
136         
137         return dict(
138             my_hash_rates_in_last_hour=dict(
139                 note="DEPRECATED",
140                 nonstale=share_att_s,
141                 rewarded=share_att_s/(1 - global_stale_prop),
142                 actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
143             ),
144             my_share_counts_in_last_hour=dict(
145                 shares=my_share_count,
146                 unstale_shares=my_unstale_count,
147                 stale_shares=my_stale_count,
148                 orphan_stale_shares=my_orphan_count,
149                 doa_stale_shares=my_doa_count,
150             ),
151             my_stale_proportions_in_last_hour=dict(
152                 stale=my_stale_prop,
153                 orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
154                 dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
155             ),
156             miner_hash_rates=miner_hash_rates,
157             miner_dead_hash_rates=miner_dead_hash_rates,
158             efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
159         )
160     
161     class WebInterface(resource.Resource):
162         def __init__(self, func, mime_type='application/json', args=()):
163             resource.Resource.__init__(self)
164             self.func, self.mime_type, self.args = func, mime_type, args
165         
166         def getChild(self, child, request):
167             return WebInterface(self.func, self.mime_type, self.args + (child,))
168         
169         def render_GET(self, request):
170             request.setHeader('Content-Type', self.mime_type)
171             request.setHeader('Access-Control-Allow-Origin', '*')
172             res = self.func(*self.args)
173             return json.dumps(res) if self.mime_type == 'application/json' else res
174     
    # Top-level JSON endpoints (WebInterface serializes the lambda's result).
    web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)/(1-p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))))
    web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(tracker.shares[current_work.value['best_share_hash']].max_target)))
    web_root.putChild('users', WebInterface(get_users))
    web_root.putChild('fee', WebInterface(lambda: worker_fee))
    web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems())))
    web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats))
    web_root.putChild('local_stats', WebInterface(get_local_stats))
    # peer port is omitted when it equals the network's default P2P port
    web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues()), 'text/plain'))
    web_root.putChild('peer_versions', WebInterface(lambda: ''.join('%s:%i ' % peer.addr + peer.other_sub_version + '\n' for peer in p2p_node.peers.itervalues()), 'text/plain'))
    web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)))
    web_root.putChild('recent_blocks', WebInterface(lambda: recent_blocks))
    web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
    
    # newer endpoints live under /web
    new_root = resource.Resource()
    web_root.putChild('web', new_root)
191     
192     stat_log = []
193     if os.path.exists(os.path.join(datadir_path, 'stats')):
194         try:
195             with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
196                 stat_log = json.loads(f.read())
197         except:
198             log.err(None, 'Error loading stats:')
    def update_stat_log():
        """Append a snapshot of pool/local stats to stat_log and persist it.
        
        Run every 5 minutes by the LoopingCall below; entries older than
        24 hours are dropped from the front of the log first.
        """
        while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
            stat_log.pop(0)
        
        # need roughly an hour of share history before stats are meaningful
        lookbehind = 3600//net.SHARE_PERIOD
        if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
            return None
        
        global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
        
        # per-miner local rates, split into all work and dead (rejected) work
        miner_hash_rates = {}
        miner_dead_hash_rates = {}
        datums, dt = local_rate_monitor.get_datums_in_last()
        for datum in datums:
            miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
            if datum['dead']:
                miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
        
        stat_log.append(dict(
            time=time.time(),
            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
            pool_stale_prop=global_stale_prop,
            local_hash_rates=miner_hash_rates,
            local_dead_hash_rates=miner_dead_hash_rates,
            shares=shares,
            stale_shares=stale_orphan_shares + stale_doa_shares,
            stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
            current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
            peers=dict(
                incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
            ),
            attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target),
            block_value=current_work2.value['subsidy']*1e-8,
        ))
        
        # NOTE(review): this write is not atomic (unlike graph_db below which
        # uses _atomic_write); a crash mid-write is tolerated by the loader's
        # try/except at startup
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
    task.LoopingCall(update_stat_log).start(5*60)
    new_root.putChild('log', WebInterface(lambda: stat_log))
241     
    def get_share(share_hash_str):
        """Describe one share (looked up by hex hash string) as a
        JSON-friendly dict, or None if the share is unknown."""
        if int(share_hash_str, 16) not in tracker.shares:
            return None
        share = tracker.shares[int(share_hash_str, 16)]
        
        return dict(
            parent='%064x' % share.previous_hash,
            children=['%064x' % x for x in sorted(tracker.reverse_shares.get(share.hash, set()), key=lambda sh: -len(tracker.reverse_shares.get(sh, set())))], # sorted from most children to least children
            local=dict(
                # this node's own view of the share
                verified=share.hash in tracker.verified.shares,
                time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
                peer_first_received_from=share.peer.addr if share.peer is not None else None,
            ),
            share_data=dict(
                timestamp=share.timestamp,
                target=share.target,
                max_target=share.max_target,
                payout_address=bitcoin_data.script2_to_address(share.new_script, net.PARENT),
                donation=share.share_data['donation']/65535, # raw value is stored scaled by 65535
                stale_info=share.share_data['stale_info'],
                nonce=share.share_data['nonce'],
            ),
            block=dict(
                hash='%064x' % share.header_hash,
                header=dict(
                    version=share.header['version'],
                    previous_block='%064x' % share.header['previous_block'],
                    merkle_root='%064x' % share.header['merkle_root'],
                    timestamp=share.header['timestamp'],
                    target=share.header['bits'].target,
                    nonce=share.header['nonce'],
                ),
                gentx=dict(
                    hash='%064x' % share.gentx_hash,
                    coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
                    value=share.share_data['subsidy']*1e-8,
                ),
                # exact tx count when we hold the txs, otherwise bounds derived
                # from the merkle branch length
                # NOTE(review): the empty-branch case yields the bare int 1, not
                # a [1, 1] range like the other branches -- confirm intended
                txn_count_range=[len(share.other_txs), len(share.other_txs)] if share.other_txs is not None else 1 if len(share.merkle_link['branch']) == 0 else [2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch'])],
            ),
        )
    # Share-chain introspection endpoints under /web.
    new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
    new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in tracker.heads]))
    new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in tracker.verified.heads]))
    new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in tracker.tails for x in tracker.reverse_shares.get(t, set())]))
    new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in tracker.verified.tails for x in tracker.verified.reverse_shares.get(t, set())]))
    new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % current_work.value['best_share_hash']))
    
    class Explorer(resource.Resource):
        # Stub for the old share explorer location; any path under /web/explorer
        # gets the same moved notice.
        def render_GET(self, request):
            return 'moved to /static/'
        def getChild(self, child, request):
            return self
    new_root.putChild('explorer', Explorer())
295     
    # Load the persisted graph database (written atomically by the LoopingCall
    # below); start with an empty object on any parse error.
    hd_path = os.path.join(datadir_path, 'graph_db')
    hd_data = _atomic_read(hd_path)
    hd_obj = {}
    if hd_data is not None:
        try:
            hd_obj = json.loads(hd_data)
        except Exception:
            log.err(None, 'Error reading graph database:')
    # one view per time window; args are (presumably bin count, seconds spanned)
    # -- confirm against graph.DataViewDescription in util/graph.py
    dataview_descriptions = {
        'last_hour': graph.DataViewDescription(150, 60*60),
        'last_day': graph.DataViewDescription(300, 60*60*24),
        'last_week': graph.DataViewDescription(300, 60*60*24*7),
        'last_month': graph.DataViewDescription(300, 60*60*24*30),
        'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
    }
311     def combine_and_keep_largest(*dicts):
312         res = {}
313         for d in dicts:
314             for k, v in d.iteritems():
315                 res[k] = res.get(k, 0) + v
316         return dict((k, v) for k, v in sorted(res.iteritems(), key=lambda (k, v): v)[-30:] if v)
    # NOTE(review): the leading bool is passed straight to
    # graph.DataStreamDescription; its meaning is defined in util/graph.py --
    # confirm there (local/miner streams use False, pool/peer streams True).
    hd = graph.HistoryDatabase.from_obj({
        'local_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
        'local_dead_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
        'local_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
        'local_dead_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
        'pool_rate': graph.DataStreamDescription(True, dataview_descriptions),
        'pool_stale_rate': graph.DataStreamDescription(True, dataview_descriptions),
        'current_payout': graph.DataStreamDescription(True, dataview_descriptions),
        'incoming_peers': graph.DataStreamDescription(True, dataview_descriptions),
        'outgoing_peers': graph.DataStreamDescription(True, dataview_descriptions),
        # per-miner dict datums are merged with combine_and_keep_largest and
        # scaled with math.mult_dict
        'miner_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
        'miner_dead_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
    }, hd_obj)
    task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100) # persist graph db every 100s
    @pseudoshare_received.watch
    def _(work, dead, user, had_vip_pass):
        # record each pseudoshare: total and dead-only streams, plus per-miner breakdowns
        t = time.time()
        hd.datastreams['local_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
        if user is not None:
            hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
            if dead:
                hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
    @share_received.watch
    def _(work, dead):
        # record each actual share found locally
        t = time.time()
        hd.datastreams['local_share_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
347     def add_point():
348         if tracker.get_height(current_work.value['best_share_hash']) < 720:
349             return
350         nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
351         poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
352         t = time.time()
353         hd.datastreams['pool_rate'].add_datum(t, poolrate)
354         hd.datastreams['pool_stale_rate'].add_datum(t, poolrate - nonstalerate)
355         hd.datastreams['current_payout'].add_datum(t, get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8)
356         hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming))
357         hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming))
    task.LoopingCall(add_point).start(5) # sample pool stats every 5 seconds
    # /web/graph_data/<source>/<view> -- path segments become the lambda's args
    new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
    
    # serve the bundled web UI from the 'web-static' dir next to the entry script
    web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))
    
    return web_root