1 from __future__ import division
9 from twisted.internet import task
10 from twisted.python import log
11 from twisted.web import resource, static
13 from bitcoin import data as bitcoin_data
14 from . import data as p2pool_data
15 from util import graph, math
# Best-effort read of `filename`, falling back to the `filename + '.new'`
# temp file that an interrupted _atomic_write may have left behind.
# NOTE(review): this listing is elided -- the try/except/return lines between
# the visible lines (and the final `return None`) are missing from this view.
17 def _atomic_read(filename):
19 with open(filename, 'rb') as f:
# Only "file does not exist" (ENOENT) is tolerated; any other OS error
# presumably re-raises on the elided line below this check.
22 if e.errno != errno.ENOENT:
# Fall back to the temp file written by _atomic_write before its rename.
25 with open(filename + '.new', 'rb') as f:
28 if e.errno != errno.ENOENT:
# Atomically replace `filename` with `data`: write to a sibling '.new' file
# first, then rename over the target so readers never see a partial file.
# NOTE(review): elided lines (presumably f.write/flush/fsync and a try:/
# os.remove of the old file) are missing from this view -- confirm upstream.
32 def _atomic_write(filename, data):
33 with open(filename + '.new', 'wb') as f:
41 os.rename(filename + '.new', filename)
# On Windows, rename() cannot replace an existing file; the elided branch
# apparently removes the target first, then retries the rename below.
42 except os.error: # windows can't overwrite
44 os.rename(filename + '.new', filename)
# Build the Twisted web resource tree exposing pool statistics, miner stats
# and graph data over HTTP. Returns the root resource (return statement is
# past the end of this view).
46 def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, pseudoshare_received, share_received):
# Recorded once so 'uptime' endpoints can report elapsed time since start.
47 start_time = time.time()
49 web_root = resource.Resource()
# NOTE(review): the lines below belong to a nested helper whose `def` line
# (presumably `def get_users():`, wired to the 'users' route at L114) is
# elided from this listing. It maps each payout address to its fraction of
# the cumulative weight of the last <=720 shares of the best chain.
52 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
53 weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
55 for script in sorted(weights, key=lambda s: weights[s]):
56 res[bitcoin_data.script2_to_address(script, net.PARENT)] = weights[script]/total_weight
# Scale the current payout txouts so they sum to `scale`, truncating
# sub-`trunc` outputs and redistributing them to one weighted-random winner
# so small payouts are not lost. NOTE(review): elided lines include the
# construction of `random_set`/`total_random` and the deletion of the
# truncated entries -- confirm against the full source.
59 def get_current_scaled_txouts(scale, trunc=0):
60 txouts = get_current_txouts()
61 total = sum(txouts.itervalues())
# Integer floor-division here means the scaled values can sum to slightly
# less than `scale`; the shortfall is fixed up at the bottom.
62 results = dict((script, value*scale//total) for script, value in txouts.iteritems())
66 for s in sorted(results, key=results.__getitem__):
67 if results[s] >= trunc:
69 total_random += results[s]
# One script from the truncated set, chosen with probability proportional
# to its amount, receives the entire truncated total.
72 winner = math.weighted_choice((script, results[script]) for script in random_set)
73 for script in random_set:
75 results[winner] = total_random
# Distribute any rounding shortfall to a weighted-random recipient so the
# final amounts sum exactly to int(scale).
76 if sum(results.itervalues()) < int(scale):
77 results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
# Produce a bitcoind 'sendmany'-style address->amount mapping for donating
# `total` BTC across current payout recipients. `total`/`trunc` arrive as URL
# path segments (strings); amounts are converted to satoshis (*1e8) for
# get_current_scaled_txouts and back to BTC (/1e8) for output.
# NOTE(review): the `if total is None:` guard and the json/dict wrapper
# around the comprehension below are elided from this listing.
80 def get_patron_sendmany(total=None, trunc='0.01'):
82 return 'need total argument. go to patron_sendmany/<TOTAL>'
83 total = int(float(total)*1e8)
84 trunc = int(float(trunc)*1e8)
86 (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
87 for script, value in get_current_scaled_txouts(total, trunc).iteritems()
# Skip scripts that cannot be rendered as an address on this network.
88 if bitcoin_data.script2_to_address(script, net.PARENT) is not None
# Aggregate the local rate monitor's recent datums into per-user hash rates.
# Returns (miner_hash_rates, miner_dead_hash_rates), each {user: hashes/sec}.
# NOTE(review): the `miner_hash_rates = {}` initializer, the `for datum in
# datums:` loop header and the dead-share condition are elided from this view.
91 def get_local_rates():
93 miner_dead_hash_rates = {}
94 datums, dt = local_rate_monitor.get_datums_in_last()
# work/dt converts each datum's attempt count into a rate over the window.
96 miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
98 miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
99 return miner_hash_rates, miner_dead_hash_rates
# Pool-wide statistics averaged over the last hour of shares.
# NOTE(review): the early-return body for short chains and the `return dict(`
# wrapper around the keyword lines below are elided from this listing.
101 def get_global_stats():
102 # averaged over last hour
103 lookbehind = 3600//net.SHARE_PERIOD
# Not enough chain history yet to compute meaningful hour-long averages.
104 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
107 nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
108 stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
110 pool_nonstale_hash_rate=nonstale_hash_rate,
# Gross rate: scale the observed (non-stale) rate up by the stale fraction.
111 pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
112 pool_stale_prop=stale_prop,
113 min_difficulty=bitcoin_data.target_to_difficulty(tracker.shares[current_work.value['best_share_hash']].max_target),
# This node's own statistics over the last hour: share counts by outcome,
# derived hash rates, efficiency vs. the pool average, peer counts, uptime.
# NOTE(review): several structural lines (early return, `return dict(`
# wrappers, closing parens) are elided from this listing.
116 def get_local_stats():
117 lookbehind = 3600//net.SHARE_PERIOD
118 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
121 global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
# Classify our shares in the window: stale_info 253 = orphan, 254 = DOA
# (as encoded in share_data by the share format).
123 my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
124 my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
125 my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
126 my_share_count = my_unstale_count + my_orphan_count + my_doa_count
127 my_stale_count = my_orphan_count + my_doa_count
# None (not 0) when we have no shares at all -- "unknown", not "perfect".
129 my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
# Total expected attempts represented by our shares in the window...
131 my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
132 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
133 if share.hash in my_share_hashes)
# ...divided by the wall-clock span of the window gives attempts/sec.
134 actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
135 tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
136 share_att_s = my_work / actual_time
138 miner_hash_rates, miner_dead_hash_rates = get_local_rates()
139 (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
142 my_hash_rates_in_last_hour=dict(
144 nonstale=share_att_s,
# 'rewarded' scales by the pool-wide stale rate, 'actual' by our own.
145 rewarded=share_att_s/(1 - global_stale_prop),
146 actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
148 my_share_counts_in_last_hour=dict(
149 shares=my_share_count,
150 unstale_shares=my_unstale_count,
151 stale_shares=my_stale_count,
152 orphan_stale_shares=my_orphan_count,
153 doa_stale_shares=my_doa_count,
155 my_stale_proportions_in_last_hour=dict(
157 orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
158 dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
160 miner_hash_rates=miner_hash_rates,
161 miner_dead_hash_rates=miner_dead_hash_rates,
162 efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
163 efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
165 incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
166 outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
170 orphan=stale_orphan_shares,
171 dead=stale_doa_shares,
173 uptime=time.time() - start_time,
# subsidy is in satoshis; *1e-8 converts to whole coins.
174 block_value=current_work2.value['subsidy']*1e-8,
class WebInterface(resource.Resource):
    """Twisted resource that serves the result of a callable.

    Extra URL path segments are accumulated into ``args`` (one new
    ``WebInterface`` per segment) and passed positionally to ``func``
    when the leaf is rendered.  When ``mime_type`` is
    ``'application/json'`` the result is JSON-serialized; otherwise it
    is returned as-is.
    """
    def __init__(self, func, mime_type='application/json', args=()):
        resource.Resource.__init__(self)
        self.func = func
        self.mime_type = mime_type
        self.args = args
    
    def getChild(self, child, request):
        # Each path segment becomes one more positional argument for func.
        return WebInterface(self.func, self.mime_type, self.args + (child,))
    
    def render_GET(self, request):
        request.setHeader('Content-Type', self.mime_type)
        # Allow cross-origin reads so external dashboards can poll these stats.
        request.setHeader('Access-Control-Allow-Origin', '*')
        result = self.func(*self.args)
        if self.mime_type == 'application/json':
            return json.dumps(result)
        return result
# Public (legacy, non-/web) JSON/plaintext endpoints.
# 'rate': pool attempts/sec over last 720 shares, grossed up by stale rate.
191 web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)/(1-p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))))
192 web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(tracker.shares[current_work.value['best_share_hash']].max_target)))
193 web_root.putChild('users', WebInterface(get_users))
194 web_root.putChild('fee', WebInterface(lambda: worker_fee))
# Current payouts as address -> BTC (satoshis / 1e8).
195 web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems())))
196 web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
197 web_root.putChild('global_stats', WebInterface(get_global_stats))
198 web_root.putChild('local_stats', WebInterface(get_local_stats))
# Peer list; the port suffix is only shown when non-default.
199 web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues()), 'text/plain'))
200 web_root.putChild('peer_versions', WebInterface(lambda: ''.join('%s:%i ' % peer.addr + peer.other_sub_version + '\n' for peer in p2p_node.peers.itervalues()), 'text/plain'))
201 web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)))
# Shares from the last 24h whose proof-of-work also met the block target.
202 web_root.putChild('recent_blocks', WebInterface(lambda: [dict(ts=s.timestamp, hash='%064x' % s.header_hash) for s in tracker.get_chain(current_work.value['best_share_hash'], 24*60*60//net.SHARE_PERIOD) if s.pow_hash <= s.header['bits'].target]))
203 web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
# Newer endpoints live under /web; a periodically-appended stats log is
# persisted to <datadir>/stats and served at /web/log.
205 new_root = resource.Resource()
206 web_root.putChild('web', new_root)
# Load any previously persisted stats log; corrupt/unreadable data is only
# logged, not fatal. NOTE(review): the surrounding try:/except lines and the
# `stat_log = []` initializer are elided from this listing.
209 if os.path.exists(os.path.join(datadir_path, 'stats')):
211 with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
212 stat_log = json.loads(f.read())
214 log.err(None, 'Error loading stats:')
# Runs every 5 minutes (see the LoopingCall below): trims entries older
# than 24h, appends a snapshot, and rewrites the stats file.
215 def update_stat_log():
216 while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
219 lookbehind = 3600//net.SHARE_PERIOD
# Skip the snapshot entirely until an hour of chain history exists.
220 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
223 global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
224 (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
225 miner_hash_rates, miner_dead_hash_rates = get_local_rates()
227 stat_log.append(dict(
229 pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
230 pool_stale_prop=global_stale_prop,
231 local_hash_rates=miner_hash_rates,
232 local_dead_hash_rates=miner_dead_hash_rates,
234 stale_shares=stale_orphan_shares + stale_doa_shares,
235 stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
236 current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
238 incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
239 outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
241 attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].max_target),
242 attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target),
243 block_value=current_work2.value['subsidy']*1e-8,
246 with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
247 f.write(json.dumps(stat_log))
248 task.LoopingCall(update_stat_log).start(5*60)
249 new_root.putChild('log', WebInterface(lambda: stat_log))
# Describe a single share (looked up by hex hash string) as a nested dict
# for the /web/share explorer endpoint. NOTE(review): the `return None` for
# unknown hashes and the `return dict(` / nested `dict(` wrapper lines are
# elided from this listing.
251 def get_share(share_hash_str):
252 if int(share_hash_str, 16) not in tracker.shares:
254 share = tracker.shares[int(share_hash_str, 16)]
257 parent='%064x' % share.previous_hash,
258 children=['%064x' % x for x in sorted(tracker.reverse_shares.get(share.hash, set()), key=lambda sh: -len(tracker.reverse_shares.get(sh, set())))], # sorted from most children to least children
260 verified=share.hash in tracker.verified.shares,
# time_seen == 0 marks shares loaded from disk; report node start instead.
261 time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
262 peer_first_received_from=share.peer.addr if share.peer is not None else None,
265 timestamp=share.timestamp,
267 max_target=share.max_target,
268 payout_address=bitcoin_data.script2_to_address(share.new_script, net.PARENT),
# donation is stored as a fraction of 65535.
269 donation=share.share_data['donation']/65535,
270 stale_info=share.share_data['stale_info'],
271 nonce=share.share_data['nonce'],
# Block-header view of the share.
274 hash='%064x' % share.header_hash,
276 version=share.header['version'],
277 previous_block='%064x' % share.header['previous_block'],
278 merkle_root='%064x' % share.header['merkle_root'],
279 timestamp=share.header['timestamp'],
280 target=share.header['bits'].target,
281 nonce=share.header['nonce'],
# Generation-transaction view of the share.
284 hash='%064x' % share.gentx_hash,
285 coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
286 value=share.share_data['subsidy']*1e-8,
# Without the full tx list, bound the count from the merkle branch length.
288 txn_count_range=[len(share.other_txs), len(share.other_txs)] if share.other_txs is not None else 1 if len(share.merkle_link['branch']) == 0 else [2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch'])],
# Share-chain explorer endpoints under /web.
291 new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
292 new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in tracker.heads]))
293 new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in tracker.verified.heads]))
# Tails are reported via their children (the earliest shares actually held).
294 new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in tracker.tails for x in tracker.reverse_shares.get(t, set())]))
295 new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in tracker.verified.tails for x in tracker.verified.reverse_shares.get(t, set())]))
296 new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % current_work.value['best_share_hash']))
# Legacy /web/explorer endpoint: the explorer UI moved to /static/.
# NOTE(review): getChild's body (line 302, presumably `return self`) is
# elided from this listing -- confirm against the full source.
298 class Explorer(resource.Resource):
299 def render_GET(self, request):
300 return 'moved to /static/'
301 def getChild(self, child, request):
303 new_root.putChild('explorer', Explorer())
# Time-series graph database: persisted to <datadir>/graph_db, loaded at
# startup and flushed every 100 seconds via _atomic_write.
305 hd_path = os.path.join(datadir_path, 'graph_db')
306 hd_data = _atomic_read(hd_path)
# NOTE(review): the `hd_obj = {}` default and the try:/except around the
# parse are elided from this listing; a corrupt DB is logged, not fatal.
308 if hd_data is not None:
310 hd_obj = json.loads(hd_data)
312 log.err(None, 'Error reading graph database:')
# Every stream is viewable at five resolutions, from last hour to last year.
313 dataview_descriptions = {
314 'last_hour': graph.DataViewDescription(150, 60*60),
315 'last_day': graph.DataViewDescription(300, 60*60*24),
316 'last_week': graph.DataViewDescription(300, 60*60*24*7),
317 'last_month': graph.DataViewDescription(300, 60*60*24*30),
318 'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
# is_gauge=False marks event-rate streams (summed work) as opposed to
# sampled point-in-time gauges; multivalues=True stores per-key dicts.
320 hd = graph.HistoryDatabase.from_obj({
321 'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
322 'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
323 'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
324 'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
325 'pool_rate': graph.DataStreamDescription(dataview_descriptions),
326 'pool_stale_rate': graph.DataStreamDescription(dataview_descriptions),
327 'current_payout': graph.DataStreamDescription(dataview_descriptions),
328 'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
329 'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
330 'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
331 'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
332 'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
# Persist the whole database atomically every 100 seconds.
334 task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
# Feed the graph database from live events.
# NOTE(review): the `t = time.time()` assignments, the `if dead:` guards,
# and the share_received callback's `def` line (346/347) are elided here.
335 @pseudoshare_received.watch
336 def _(work, dead, user):
337 hd.datastreams['local_hash_rate'].add_datum(t, work)
340 hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
342 hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
344 hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
345 @share_received.watch
348 hd.datastreams['local_share_hash_rate'].add_datum(t, work)
350 hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
# Periodic sampler for the gauge-style streams (pool rate, payouts, peers),
# run every 5 seconds by the LoopingCall below.
# NOTE(review): the enclosing `def add_point():` header (line ~351) and the
# early-return body plus `t = time.time()` are elided from this listing.
352 if tracker.get_height(current_work.value['best_share_hash']) < 720:
354 nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
355 poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
357 hd.datastreams['pool_rate'].add_datum(t, poolrate)
# The stale rate is recorded as the gross/non-stale difference.
358 hd.datastreams['pool_stale_rate'].add_datum(t, poolrate - nonstalerate)
359 current_txouts = get_current_txouts()
360 hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8)
361 miner_hash_rates, miner_dead_hash_rates = get_local_rates()
362 current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, net.PARENT), amount) for script, amount in current_txouts.iteritems())
# Only record payouts for users currently mining locally.
363 hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address))
364 hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming))
365 hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming))
366 task.LoopingCall(add_point).start(5)
# /web/graph_data/<source>/<view> serves the sampled series as JSON.
367 new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
# Static explorer assets served from web-static/ next to the entry script.
369 web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))