os.remove(filename)
os.rename(filename + '.new', filename)
-def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received):
+def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received, share_received):
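+    # share_received: new event in the style of pseudoshare_received, watched
+    # below with (work, dead), presumably fired once per locally found share;
+    # it feeds the new share-level graphs.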
start_time = time.time()
web_root = resource.Resource()
grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
task.LoopingCall(add_point).start(100)
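+    # had_vip_pass is a new flag on pseudoshare events: per-miner points on the
+    # local rate graph are now only recorded for miners that presented the VIP pass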
@pseudoshare_received.watch
-    def _(work, dead, user):
+    def _(work, dead, user, had_vip_pass):
reactor.callLater(1, grapher.add_localrate_point, work, dead)
-        if user is not None:
+        if user is not None and had_vip_pass:
reactor.callLater(1, grapher.add_localminer_point, user, work, dead)
hd_path = os.path.join(datadir_path, 'graph_db')
        'last_month': graph.DataViewDescription(300, 60*60*24*30),
        'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
    }
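+    # combiner for the dict-valued miner streams below: merges {user: work} dicts
+    # by summing per-user values, keeping only the 30 largest non-zero entries
+    # (presumably to bound the size of each stored graph bin)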
+    def combine_and_keep_largest(*dicts):
+        res = {}
+        for d in dicts:
+            for k, v in d.iteritems():
+                res[k] = res.get(k, 0) + v
+        return dict((k, v) for k, v in sorted(res.iteritems(), key=lambda (k, v): v)[-30:] if v)
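+    # e.g. combine_and_keep_largest({'a': 1, 'b': 5}, {'a': 2}) == {'a': 3, 'b': 5}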
hd = graph.HistoryDatabase.from_obj({
'local_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
'local_dead_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
+        'local_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
+        'local_dead_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
'pool_rate': graph.DataStreamDescription(True, dataview_descriptions),
'pool_stale_rate': graph.DataStreamDescription(True, dataview_descriptions),
'current_payout': graph.DataStreamDescription(True, dataview_descriptions),
'incoming_peers': graph.DataStreamDescription(True, dataview_descriptions),
'outgoing_peers': graph.DataStreamDescription(True, dataview_descriptions),
+        # dict-valued streams of {user: work}; the three extra arguments are
+        # presumably the zero element, combiner, and scalar multiplier for dicts
+        'miner_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
+        'miner_dead_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict),
    }, hd_obj)
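+    # checkpoint the graph database to disk every 100 seconds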
task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
@pseudoshare_received.watch
-    def _(work, dead, user):
+    def _(work, dead, user, had_vip_pass):
t = time.time()
hd.datastreams['local_hash_rate'].add_datum(t, work)
if dead:
hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
+        if user is not None:
+            hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
+            if dead:
+                hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
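+    # counterpart of the pseudoshare watcher above, for actual shares, feeding
+    # the share-level datastreams registered in the HistoryDatabase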
+    @share_received.watch
+    def _(work, dead):
+        t = time.time()
+        hd.datastreams['local_share_hash_rate'].add_datum(t, work)
+        if dead:
+            hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
def add_point():
if tracker.get_height(current_work.value['best_share_hash']) < 720:
return
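+        # (presumably 720 is the lookbehind, in shares, of the rate estimates
+        # computed below; with less history the graphed points would be unreliable)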