moved all web related stuff into p2pool.web
p2pool/web.py
from __future__ import division

import json
import os
import time

from twisted.internet import task
from twisted.python import log
from twisted.web import resource

from bitcoin import data as bitcoin_data
from . import data as p2pool_data, graphs
from util import math

def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, recent_blocks):
    start_time = time.time()
    
    web_root = resource.Resource()
    
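    # /rate: pool hashrate (attempts/s) over the last 720 shares, scaled up to include stale shares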
    def get_rate():
        if tracker.get_height(current_work.value['best_share_hash']) < 720:
            return json.dumps(None)
        return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
            / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
    
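    # /users: each payout script (human-readable) mapped to its fraction of the payout weights over the last 720 shares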
    def get_users():
        height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
        res = {}
        for script in sorted(weights, key=lambda s: weights[s]):
            res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
        return json.dumps(res)
    
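    # Scale the current payouts so they sum to 'scale'; outputs smaller than 'trunc' are pooled and
    # awarded to a single owner chosen with probability proportional to the amount owed, and any
    # rounding shortfall is added to a weighted-random output so the total matches 'scale'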
    def get_current_scaled_txouts(scale, trunc=0):
        txouts = get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value*scale//total) for script, value in txouts.iteritems())
        if trunc > 0:
            total_random = 0
            random_set = set()
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        if sum(results.itervalues()) < int(scale):
            results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
        return results
    
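    # /current_payouts: payout script -> amount (in coins) that would be received if a block were found now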
    def get_current_payouts():
        return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
    
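    # /patron_sendmany?total=<amount>[/<trunc>]: address -> amount dict splitting 'total' coins in
    # proportion to current payouts, merging outputs below 'trunc' (default 0.01)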
    def get_patron_sendmany(this):
        try:
            if '/' in this:
                this, trunc = this.split('/', 1)
            else:
                trunc = '0.01'
            return json.dumps(dict(
                (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
                for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
                if bitcoin_data.script2_to_address(script, net.PARENT) is not None
            ))
        except:
            log.err()
            return json.dumps(None)
    
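    # /global_stats: pool-wide hashrate and stale proportion, averaged over the last hour of shares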
    def get_global_stats():
        # averaged over last hour
        lookbehind = 3600//net.SHARE_PERIOD
        if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
            return json.dumps(None)
        
        nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
        stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
        return json.dumps(dict(
            pool_nonstale_hash_rate=nonstale_hash_rate,
            pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
            pool_stale_prop=stale_prop,
        ))
    
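    # /local_stats: this node's share counts, stale proportions and per-miner hashrates over the last
    # hour (stale_info 253 marks orphan shares, 254 marks dead-on-arrival shares)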
    def get_local_stats():
        lookbehind = 3600//net.SHARE_PERIOD
        if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
            return json.dumps(None)
        
        global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
        
        my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
        my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
        my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
        my_share_count = my_unstale_count + my_orphan_count + my_doa_count
        my_stale_count = my_orphan_count + my_doa_count
        
        my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
        
        my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
            for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
            if share.hash in my_share_hashes)
        actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
            tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
        share_att_s = my_work / actual_time
        
        miner_hash_rates = {}
        miner_dead_hash_rates = {}
        datums, dt = local_rate_monitor.get_datums_in_last()
        for datum in datums:
            miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
            if datum['dead']:
                miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
        
        return json.dumps(dict(
            my_hash_rates_in_last_hour=dict(
                note="DEPRECATED",
                nonstale=share_att_s,
                rewarded=share_att_s/(1 - global_stale_prop),
                actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
            ),
            my_share_counts_in_last_hour=dict(
                shares=my_share_count,
                unstale_shares=my_unstale_count,
                stale_shares=my_stale_count,
                orphan_stale_shares=my_orphan_count,
                doa_stale_shares=my_doa_count,
            ),
            my_stale_proportions_in_last_hour=dict(
                stale=my_stale_prop,
                orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
            ),
            miner_hash_rates=miner_hash_rates,
            miner_dead_hash_rates=miner_dead_hash_rates,
        ))
    
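    # /peer_addresses: space-separated host[:port] list of connected peers (port omitted when it is the default P2P port)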
    def get_peer_addresses():
        return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
    
    def get_uptime():
        return json.dumps(time.time() - start_time)
    
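    # Tiny twisted Resource wrapper: renders func(*fields), taking each named field from the request's
    # query arguments, and serves the result with the given MIME type and a permissive CORS header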
    class WebInterface(resource.Resource):
        def __init__(self, func, mime_type, *fields):
            resource.Resource.__init__(self)
            self.func, self.mime_type, self.fields = func, mime_type, fields
        
        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
            request.setHeader('Access-Control-Allow-Origin', '*')
            return self.func(*(request.args[field][0] for field in self.fields))
    
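    # top-level endpoints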
    web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
    web_root.putChild('users', WebInterface(get_users, 'application/json'))
    web_root.putChild('fee', WebInterface(lambda: json.dumps(worker_fee), 'application/json'))
    web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
    web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
    web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
    web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
    web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
    web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)), 'application/json'))
    web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
    web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
    
    try:
        from . import draw
        web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
    except ImportError:
        print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
    
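    # newer endpoints are grouped under a /web/ sub-tree (e.g. /web/log below)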
    new_root = resource.Resource()
    web_root.putChild('web', new_root)
    
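    # persistent stats log, stored as JSON in <datadir>/stats and trimmed to the last 24 hours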
    stat_log = []
    if os.path.exists(os.path.join(datadir_path, 'stats')):
        try:
            with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                stat_log = json.loads(f.read())
        except:
            log.err(None, 'Error loading stats:')
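    # snapshot pool/local stats every five minutes and write the log back to disk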
    def update_stat_log():
        while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
            stat_log.pop(0)
        
        lookbehind = 3600//net.SHARE_PERIOD
        if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
            return None
        
        global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
        
        miner_hash_rates = {}
        miner_dead_hash_rates = {}
        datums, dt = local_rate_monitor.get_datums_in_last()
        for datum in datums:
            miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
            if datum['dead']:
                miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
        
        stat_log.append(dict(
            time=time.time(),
            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
            pool_stale_prop=global_stale_prop,
            local_hash_rates=miner_hash_rates,
            local_dead_hash_rates=miner_dead_hash_rates,
            shares=shares,
            stale_shares=stale_orphan_shares + stale_doa_shares,
            stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
            current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
            peers=dict(
                incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
            ),
            attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target),
            block_value=current_work2.value['subsidy']*1e-8,
        ))
        
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
    task.LoopingCall(update_stat_log).start(5*60)
    new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
    
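    # RRD-backed graphs under /graphs, with a pool-rate point added every 100 seconds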
    grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
    web_root.putChild('graphs', grapher.get_resource())
    def add_point():
        if tracker.get_height(current_work.value['best_share_hash']) < 720:
            return
        nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
        poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
        grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
    task.LoopingCall(add_point).start(100)
    
    return web_root