fixed memory leak of peer objects due to Share objects holding a reference to the origin...
[p2pool.git] / p2pool / web.py
1 from __future__ import division
2
3 import errno
4 import json
5 import os
6 import sys
7 import time
8 import traceback
9
10 from twisted.internet import defer, task
11 from twisted.python import log
12 from twisted.web import resource, static
13
14 import p2pool
15 from bitcoin import data as bitcoin_data
16 from . import data as p2pool_data
17 from util import deferred_resource, graph, math, memory, pack, variable
18
19 def _atomic_read(filename):
20     try:
21         with open(filename, 'rb') as f:
22             return f.read()
23     except IOError, e:
24         if e.errno != errno.ENOENT:
25             raise
26     try:
27         with open(filename + '.new', 'rb') as f:
28             return f.read()
29     except IOError, e:
30         if e.errno != errno.ENOENT:
31             raise
32     return None
33
34 def _atomic_write(filename, data):
35     with open(filename + '.new', 'wb') as f:
36         f.write(data)
37         f.flush()
38         try:
39             os.fsync(f.fileno())
40         except:
41             pass
42     try:
43         os.rename(filename + '.new', filename)
44     except: # XXX windows can't overwrite
45         os.remove(filename)
46         os.rename(filename + '.new', filename)
47
def get_web_root(wb, datadir_path, bitcoind_warning_var, stop_event=variable.Event()):
    """Build and return the twisted.web resource tree for p2pool's HTTP API.

    wb -- worker bridge; provides .node, local_rate_monitor, stale counts
          and the pseudoshare_received/share_received events
    datadir_path -- directory used to persist the 'stats' and 'graph_db' files
    bitcoind_warning_var -- variable holding the current bitcoind warning text
    stop_event -- event watched to stop the periodic LoopingCall tasks
                  (NOTE(review): mutable default Event is shared across calls --
                  presumably this function is only invoked once per process)
    """
    node = wb.node
    start_time = time.time()
    
    web_root = resource.Resource()
    
    def get_users():
        # Map payout address -> fraction of total weight over up to the
        # last 720 shares of the current best chain.
        height, last = node.tracker.get_height_and_last(node.best_share_var.value)
        weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(node.best_share_var.value, min(height, 720), 65535*2**256)
        res = {}
        for script in sorted(weights, key=lambda s: weights[s]):
            res[bitcoin_data.script2_to_address(script, node.net.PARENT)] = weights[script]/total_weight
        return res
    
    def get_current_scaled_txouts(scale, trunc=0):
        # Scale current payout txouts so they sum to 'scale'. Payouts below
        # 'trunc' are pooled and awarded to a single weighted-random member
        # of the truncated set.
        txouts = node.get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value*scale//total) for script, value in txouts.iteritems())
        if trunc > 0:
            total_random = 0
            random_set = set()
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        # hand any integer-division rounding shortfall to one weighted-random recipient
        if sum(results.itervalues()) < int(scale):
            results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
        return results
    
    def get_patron_sendmany(total=None, trunc='0.01'):
        # Render a bitcoind 'sendmany'-compatible JSON mapping of
        # address -> amount (in coins) distributing 'total' among miners.
        if total is None:
            return 'need total argument. go to patron_sendmany/<TOTAL>'
        total = int(float(total)*1e8)
        trunc = int(float(trunc)*1e8)
        return json.dumps(dict(
            (bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8)
            for script, value in get_current_scaled_txouts(total, trunc).iteritems()
            if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None
        ))
    
    def get_local_rates():
        # Per-user local hash rate (and its dead-on-arrival portion) from
        # recent local rate monitor datums; returns two dicts user -> rate.
        miner_hash_rates = {}
        miner_dead_hash_rates = {}
        datums, dt = wb.local_rate_monitor.get_datums_in_last()
        for datum in datums:
            miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
            if datum['dead']:
                miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
        return miner_hash_rates, miner_dead_hash_rates
    
    def get_global_stats():
        # averaged over last hour
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
        
        nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)
        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        return dict(
            pool_nonstale_hash_rate=nonstale_hash_rate,
            pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
            pool_stale_prop=stale_prop,
            min_difficulty=bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target),
        )
    
    def get_local_stats():
        # Stats for this node's own shares over roughly the last hour;
        # returns None until the chain is at least 10 shares tall.
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
        
        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        
        my_unstale_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes)
        my_orphan_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan')
        my_doa_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa')
        my_share_count = my_unstale_count + my_orphan_count + my_doa_count
        my_stale_count = my_orphan_count + my_doa_count
        
        my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
        
        my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
            for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1)
            if share.hash in wb.my_share_hashes)
        actual_time = (node.tracker.items[node.best_share_var.value].timestamp -
            node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp)
        share_att_s = my_work / actual_time
        
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
        
        return dict(
            my_hash_rates_in_last_hour=dict(
                note="DEPRECATED",
                nonstale=share_att_s,
                rewarded=share_att_s/(1 - global_stale_prop),
                actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
            ),
            my_share_counts_in_last_hour=dict(
                shares=my_share_count,
                unstale_shares=my_unstale_count,
                stale_shares=my_stale_count,
                orphan_stale_shares=my_orphan_count,
                doa_stale_shares=my_doa_count,
            ),
            my_stale_proportions_in_last_hour=dict(
                stale=my_stale_prop,
                orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
            ),
            miner_hash_rates=miner_hash_rates,
            miner_dead_hash_rates=miner_dead_hash_rates,
            efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
            efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
            ),
            shares=dict(
                total=shares,
                orphan=stale_orphan_shares,
                dead=stale_doa_shares,
            ),
            uptime=time.time() - start_time,
            attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
            block_value=node.bitcoind_work.value['subsidy']*1e-8,
            warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_warning_var.value, node.bitcoind_work.value),
            donation_proportion=wb.donation_percentage/100,
        )
    
    class WebInterface(deferred_resource.DeferredResource):
        """Resource adapter: serves func(*args), JSON-encoding the result
        when mime_type is 'application/json'. Extra URL path segments are
        accumulated into args via getChild."""
        def __init__(self, func, mime_type='application/json', args=()):
            deferred_resource.DeferredResource.__init__(self)
            self.func, self.mime_type, self.args = func, mime_type, args
        
        def getChild(self, child, request):
            # each path segment becomes one more positional argument for func
            return WebInterface(self.func, self.mime_type, self.args + (child,))
        
        @defer.inlineCallbacks
        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
            request.setHeader('Access-Control-Allow-Origin', '*')
            res = yield self.func(*self.args)
            defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res)
    
    def decent_height():
        # chain height capped at 720 shares, for rate/stale averaging windows
        return min(node.tracker.get_height(node.best_share_var.value), 720)
    web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height())/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, decent_height()))))
    web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target)))
    web_root.putChild('users', WebInterface(get_users))
    web_root.putChild('user_stales', WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in
        p2pool_data.get_user_stale_props(node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems())))
    web_root.putChild('fee', WebInterface(lambda: wb.worker_fee))
    web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8) for script, value in node.get_current_txouts().iteritems())))
    web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats))
    web_root.putChild('local_stats', WebInterface(get_local_stats))
    web_root.putChild('peer_addresses', WebInterface(lambda: ['%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port) for peer in node.p2p_node.peers.itervalues()]))
    web_root.putChild('peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues())))
    # /pings: ping every peer 3 times; report the minimum round trip scaled
    # by 1/0.001 (i.e. milliseconds); failed pings contribute None
    web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue(
        dict([(a, (yield b)) for a, b in
            [(
                '%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port),
                defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
                    min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)])
                ))()
            ) for peer in list(node.p2p_node.peers.itervalues())]
        ])
    ))))
    web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(wb.my_pubkey_hash, node.net.PARENT)))
    # /recent_blocks: shares from the last ~24h whose pow also met the
    # parent-network block target
    web_root.putChild('recent_blocks', WebInterface(lambda: [dict(
        ts=s.timestamp,
        hash='%064x' % s.header_hash,
        number=pack.IntType(24).unpack(s.share_data['coinbase'][1:4]) if len(s.share_data['coinbase']) >= 4 else None,
        share='%064x' % s.hash,
    ) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target]))
    web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
    web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True)))
    
    new_root = resource.Resource()
    web_root.putChild('web', new_root)
    
    # rolling 24h stats log, persisted as JSON in <datadir>/stats and
    # appended to every 5 minutes
    stat_log = []
    if os.path.exists(os.path.join(datadir_path, 'stats')):
        try:
            with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                stat_log = json.loads(f.read())
        except:
            log.err(None, 'Error loading stats:')
    def update_stat_log():
        # drop entries older than 24h, append a snapshot, persist to disk
        while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
            stat_log.pop(0)
        
        lookbehind = 3600//node.net.SHARE_PERIOD
        if node.tracker.get_height(node.best_share_var.value) < lookbehind:
            return None
        
        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        
        stat_log.append(dict(
            time=time.time(),
            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)/(1-global_stale_prop),
            pool_stale_prop=global_stale_prop,
            local_hash_rates=miner_hash_rates,
            local_dead_hash_rates=miner_dead_hash_rates,
            shares=shares,
            stale_shares=stale_orphan_shares + stale_doa_shares,
            stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
            current_payout=node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
            ),
            attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
            block_value=node.bitcoind_work.value['subsidy']*1e-8,
        ))
        
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
    x = task.LoopingCall(update_stat_log)
    x.start(5*60)
    stop_event.watch(x.stop)
    new_root.putChild('log', WebInterface(lambda: stat_log))
    
    def get_share(share_hash_str):
        # Full JSON description of one share, looked up by hex hash;
        # returns None for unknown shares.
        if int(share_hash_str, 16) not in node.tracker.items:
            return None
        share = node.tracker.items[int(share_hash_str, 16)]
        
        return dict(
            parent='%064x' % share.previous_hash,
            children=['%064x' % x for x in sorted(node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())))], # sorted from most children to least children
            type_name=type(share).__name__,
            local=dict(
                verified=share.hash in node.tracker.verified.items,
                time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
                peer_first_received_from=share.peer_addr,
            ),
            share_data=dict(
                timestamp=share.timestamp,
                target=share.target,
                max_target=share.max_target,
                payout_address=bitcoin_data.script2_to_address(share.new_script, node.net.PARENT),
                donation=share.share_data['donation']/65535,
                stale_info=share.share_data['stale_info'],
                nonce=share.share_data['nonce'],
                desired_version=share.share_data['desired_version'],
            ),
            block=dict(
                hash='%064x' % share.header_hash,
                header=dict(
                    version=share.header['version'],
                    previous_block='%064x' % share.header['previous_block'],
                    merkle_root='%064x' % share.header['merkle_root'],
                    timestamp=share.header['timestamp'],
                    target=share.header['bits'].target,
                    nonce=share.header['nonce'],
                ),
                gentx=dict(
                    hash='%064x' % share.gentx_hash,
                    coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
                    value=share.share_data['subsidy']*1e-8,
                ),
                # when the share doesn't carry its transactions, bound the
                # count from the merkle link depth
                txn_count_range=[len(share.other_txs), len(share.other_txs)] if share.other_txs is not None else 1 if len(share.merkle_link['branch']) == 0 else [2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch'])],
            ),
        )
    new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
    new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads]))
    new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.verified.heads]))
    new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())]))
    new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set())]))
    new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value))
    def get_share_data(share_hash_str):
        # Raw packed share bytes for the given hex hash ('' if unknown).
        if int(share_hash_str, 16) not in node.tracker.items:
            return ''
        share = node.tracker.items[int(share_hash_str, 16)]
        return p2pool_data.share_type.pack(share.as_share1a())
    new_root.putChild('share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 'application/octet-stream'))
    new_root.putChild('currency_info', WebInterface(lambda: dict(
        symbol=node.net.PARENT.SYMBOL,
        block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
        address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
    )))
    new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
    
    # graph history database, persisted as JSON in <datadir>/graph_db and
    # rewritten atomically every 100 seconds
    hd_path = os.path.join(datadir_path, 'graph_db')
    hd_data = _atomic_read(hd_path)
    hd_obj = {}
    if hd_data is not None:
        try:
            hd_obj = json.loads(hd_data)
        except Exception:
            log.err(None, 'Error reading graph database:')
    dataview_descriptions = {
        'last_hour': graph.DataViewDescription(150, 60*60),
        'last_day': graph.DataViewDescription(300, 60*60*24),
        'last_week': graph.DataViewDescription(300, 60*60*24*7),
        'last_month': graph.DataViewDescription(300, 60*60*24*30),
        'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
    }
    def build_desired_rates(ds_name, ds_desc, dv_name, dv_desc, obj):
        # Backfill the 'desired_version_rates' stream from historical
        # 'pool_rates' and 'desired_versions' data when it is absent
        # (NOTE(review): presumably for upgrades from databases written
        # before this stream existed -- confirm against graph module).
        if not obj:
            last_bin_end = 0
            bins = dv_desc.bin_count*[{}]
        else:
            pool_rates = obj['pool_rates'][dv_name]
            desired_versions = obj['desired_versions'][dv_name]
            def get_total_pool_rate(t):
                n = int((pool_rates['last_bin_end'] - t)/dv_desc.bin_width)
                if n < 0 or n >= dv_desc.bin_count:
                    return None
                total = sum(x[0] for x in pool_rates['bins'][n].values())
                count = math.mean(x[1] for x in pool_rates['bins'][n].values())
                if count == 0:
                    return None
                return total/count
            last_bin_end = desired_versions['last_bin_end']
            bins = [dict((name, (total*get_total_pool_rate(last_bin_end - (i+1/2)*dv_desc.bin_width), count)) for name, (total, count) in desired_versions['bins'][i].iteritems()) for i in xrange(dv_desc.bin_count)]
        return graph.DataView(dv_desc, ds_desc, last_bin_end, bins)
    hd = graph.HistoryDatabase.from_obj({
        'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True),
        'current_payout': graph.DataStreamDescription(dataview_descriptions),
        'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
        'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
        'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
        'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'desired_versions': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True),
        'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True, default_func=build_desired_rates),
        'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'getwork_latency': graph.DataStreamDescription(dataview_descriptions),
        'memory_usage': graph.DataStreamDescription(dataview_descriptions),
    }, hd_obj)
    x = task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
    x.start(100)
    stop_event.watch(x.stop)
    @wb.pseudoshare_received.watch
    def _(work, dead, user):
        # record every pseudoshare into the local (and per-miner) rate streams
        t = time.time()
        hd.datastreams['local_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
        if user is not None:
            hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
            if dead:
                hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
    @wb.share_received.watch
    def _(work, dead):
        # record accepted shares into the share-level rate streams
        t = time.time()
        hd.datastreams['local_share_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
    @node.p2p_node.traffic_happened.watch
    def _(name, bytes):
        hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})
    def add_point():
        # Sample pool/peer/payout/version stats into the history database;
        # run every 5 seconds by the LoopingCall below.
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value))
        t = time.time()
        
        pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True)
        pool_total = sum(pool_rates.itervalues())
        hd.datastreams['pool_rates'].add_datum(t, pool_rates)
        
        current_txouts = node.get_current_txouts()
        hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8)
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems())
        hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address))
        
        hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming))
        hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming))
        
        vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind)
        vs_total = sum(vs.itervalues())
        hd.datastreams['desired_versions'].add_datum(t, dict((str(k), v/vs_total) for k, v in vs.iteritems()))
        hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
        try:
            hd.datastreams['memory_usage'].add_datum(t, memory.resident())
        except:
            if p2pool.DEBUG:
                traceback.print_exc()
    x = task.LoopingCall(add_point)
    x.start(5)
    stop_event.watch(x.stop)
    @node.bitcoind_work.changed.watch
    def _(new_work):
        hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency'])
    new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
    
    web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))
    
    return web_root