from p2pool.util import graph
class Test(unittest.TestCase):
    """Tests for graph.keep_largest (resolved to the post-patch version of this diff).

    The assertions below establish the contract: keep_largest(n, squash_key)
    returns a dict of at most n entries where, when squash_key is given, the
    smallest values are summed into the squash_key slot (which counts toward n),
    and when squash_key is omitted, simply the n largest entries are kept.
    """
    def test_keep_largest(self):
        b = dict(a=1, b=3, c=5, d=7, e=9)
        # With a squash key: 'e' and 'd' survive, a+b+c = 1+3+5 = 9 is squashed.
        assert graph.keep_largest(3, 'squashed')(b) == {'squashed': 9, 'd': 7, 'e': 9}
        # Without a squash key: the three largest entries are kept unchanged.
        assert graph.keep_largest(3)(b) == {'c': 5, 'd': 7, 'e': 9}
def _((i, bin)):
left, right = last_bin_end - self.desc.bin_width*(i + 1), min(t, last_bin_end - self.desc.bin_width*i)
center, width = (left+right)/2, right-left
- if self.ds_desc.source_is_cumulative:
+ if self.ds_desc.is_gauge:
val = dict((k, total/count) for k, (total, count) in bin.iteritems())
else:
val = dict((k, total/width) for k, (total, count) in bin.iteritems())
if not self.ds_desc.multivalues:
- val = val.get(None, None if self.ds_desc.source_is_cumulative else 0)
+ val = val.get(None, None if self.ds_desc.is_gauge else 0)
return center, val, width
return map(_, enumerate(bins))
class DataStreamDescription(object):
- def __init__(self, source_is_cumulative, dataview_descriptions, multivalues=False):
- self.source_is_cumulative = source_is_cumulative
+ def __init__(self, dataview_descriptions, is_gauge=True, multivalues=False, multivalues_keep=20, multivalues_squash_key=None):
self.dataview_descriptions = dataview_descriptions
+ self.is_gauge = is_gauge
self.multivalues = multivalues
- self.keep_largest_func = keep_largest(20, None, key=lambda (t, c): t/c if self.source_is_cumulative else t, add_func=lambda (a1, b1), (a2, b2): (a1+a2, b1+b2))
+ self.keep_largest_func = keep_largest(multivalues_keep, multivalues_squash_key, key=lambda (t, c): t/c if self.is_gauge else t, add_func=lambda (a1, b1), (a2, b2): (a1+a2, b1+b2))
class DataStream(object):
def __init__(self, desc, dataviews):
'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
}
# Build the history database (resolved to the post-patch form of this diff):
# DataStreamDescription now takes dataview_descriptions first, with is_gauge
# defaulting to True, so only the rate-style streams pass is_gauge=False.
hd = graph.HistoryDatabase.from_obj({
    'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
    'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
    'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
    'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
    'pool_rate': graph.DataStreamDescription(dataview_descriptions),
    'pool_stale_rate': graph.DataStreamDescription(dataview_descriptions),
    'current_payout': graph.DataStreamDescription(dataview_descriptions),
    'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
    'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
    'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
    'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
    'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
}, hd_obj)
task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
@pseudoshare_received.watch