some cleanup for graph

Replace combine_and_keep_largest with a simpler keep_largest helper, rename
DataStreamDescription's confusingly named source_is_cumulative flag to
is_gauge (same value at every call site, now defaulting to True), and expose
the multivalue squashing behavior as multivalues_keep and
multivalues_squash_key parameters.
author    Forrest Voight <forrest@forre.st>
Sun, 15 Apr 2012 18:56:24 +0000 (14:56 -0400)
committer Forrest Voight <forrest@forre.st>
Sun, 15 Apr 2012 19:11:00 +0000 (15:11 -0400)
p2pool/test/util/test_graph.py
p2pool/util/graph.py
p2pool/web.py

diff --git a/p2pool/test/util/test_graph.py b/p2pool/test/util/test_graph.py
index 595c2cd..0b4dd0f 100644
--- a/p2pool/test/util/test_graph.py
+++ b/p2pool/test/util/test_graph.py
@@ -3,9 +3,7 @@ import unittest
 from p2pool.util import graph
 
 class Test(unittest.TestCase):
-    def test_combine_and_keep_largest(self):
-        f = graph.combine_and_keep_largest(3, 'squashed')
-        a, b = dict(a=1, b=2, c=3, d=4, e=5), dict(a=1, b=3, c=5, d=7, e=9)
-        res = f(a, b)
-        assert res == {'squashed': 15, 'e': 14, 'd': 11}
-        assert f(res, dict(squashed=100)) == {'squashed': 115, 'e': 14, 'd': 11}
+    def test_keep_largest(self):
+        b = dict(a=1, b=3, c=5, d=7, e=9)
+        assert graph.keep_largest(3, 'squashed')(b) == {'squashed': 9, 'd': 7, 'e': 9}
+        assert graph.keep_largest(3)(b) == {'c': 5, 'd': 7, 'e': 9}
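The new assertions pin down keep_largest's contract: keep the n largest entries of a dict by value and, when a squash key is given, fold every dropped entry into a single bucket that itself occupies one of the n kept slots (so 'squashed' above is a+b+c = 1+3+5 = 9, coincidentally equal to e's value). A minimal Python 2 sketch consistent with these assertions (the committed body lives in p2pool/util/graph.py and may differ in detail):

    def keep_largest(n, squash_key=None, key=lambda x: x, add_func=lambda a, b: a+b):
        def _(d):
            # sort entries largest-first by key(value); an entry already under
            # squash_key always sorts last, so it keeps absorbing dropped values
            items = sorted(d.iteritems(), key=lambda (k, v): (k != squash_key, key(v)), reverse=True)
            while len(items) > n:
                k, v = items.pop()  # drop the current smallest entry
                if squash_key is not None:
                    # fold it into the last kept slot, which becomes the squash bucket
                    items[-1] = squash_key, add_func(items[-1][1], v)
            return dict(items)
        return _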
diff --git a/p2pool/util/graph.py b/p2pool/util/graph.py
index 8dea8a1..2d49883 100644
--- a/p2pool/util/graph.py
+++ b/p2pool/util/graph.py
@@ -59,22 +59,22 @@ class DataView(object):
         def _((i, bin)):
             left, right = last_bin_end - self.desc.bin_width*(i + 1), min(t, last_bin_end - self.desc.bin_width*i)
             center, width = (left+right)/2, right-left
-            if self.ds_desc.source_is_cumulative:
+            if self.ds_desc.is_gauge:
                 val = dict((k, total/count) for k, (total, count) in bin.iteritems())
             else:
                 val = dict((k, total/width) for k, (total, count) in bin.iteritems())
             if not self.ds_desc.multivalues:
-                val = val.get(None, None if self.ds_desc.source_is_cumulative else 0)
+                val = val.get(None, None if self.ds_desc.is_gauge else 0)
             return center, val, width
         return map(_, enumerate(bins))
 
 
 class DataStreamDescription(object):
-    def __init__(self, source_is_cumulative, dataview_descriptions, multivalues=False):
-        self.source_is_cumulative = source_is_cumulative
+    def __init__(self, dataview_descriptions, is_gauge=True, multivalues=False, multivalues_keep=20, multivalues_squash_key=None):
         self.dataview_descriptions = dataview_descriptions
+        self.is_gauge = is_gauge
         self.multivalues = multivalues
-        self.keep_largest_func = keep_largest(20, None, key=lambda (t, c): t/c if self.source_is_cumulative else t, add_func=lambda (a1, b1), (a2, b2): (a1+a2, b1+b2))
+        self.keep_largest_func = keep_largest(multivalues_keep, multivalues_squash_key, key=lambda (t, c): t/c if self.is_gauge else t, add_func=lambda (a1, b1), (a2, b2): (a1+a2, b1+b2))
 
 class DataStream(object):
     def __init__(self, desc, dataviews):
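The renamed flag keeps its meaning but now says what it is: with is_gauge=True (the new default) each bin reports the average of the samples it received (total/count); with is_gauge=False the bin total is divided by the bin width in seconds, turning an event counter into a rate. A usage sketch against the new signature (the 'last_year' view parameters are copied from web.py below; the variable names are illustrative):

    from p2pool.util import graph

    views = {'last_year': graph.DataViewDescription(300, 60*60*24*365.25)}

    # sampled quantity (e.g. peer count): bin value = total/count, an average
    gauge_stream = graph.DataStreamDescription(views)

    # event counter (e.g. shares or hashes done): bin value = total/width, a rate
    rate_stream = graph.DataStreamDescription(views, is_gauge=False)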
diff --git a/p2pool/web.py b/p2pool/web.py
index 6a469f5..153eb90 100644
--- a/p2pool/web.py
+++ b/p2pool/web.py
@@ -305,18 +305,18 @@ def get_web_root(tracker, current_work, current_work2, get_current_txouts, datad
         'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
     }
     hd = graph.HistoryDatabase.from_obj({
-        'local_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
-        'local_dead_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
-        'local_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
-        'local_dead_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions),
-        'pool_rate': graph.DataStreamDescription(True, dataview_descriptions),
-        'pool_stale_rate': graph.DataStreamDescription(True, dataview_descriptions),
-        'current_payout': graph.DataStreamDescription(True, dataview_descriptions),
-        'current_payouts': graph.DataStreamDescription(True, dataview_descriptions, multivalues=True),
-        'incoming_peers': graph.DataStreamDescription(True, dataview_descriptions),
-        'outgoing_peers': graph.DataStreamDescription(True, dataview_descriptions),
-        'miner_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, multivalues=True),
-        'miner_dead_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, multivalues=True),
+        'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+        'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+        'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+        'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
+        'pool_rate': graph.DataStreamDescription(dataview_descriptions),
+        'pool_stale_rate': graph.DataStreamDescription(dataview_descriptions),
+        'current_payout': graph.DataStreamDescription(dataview_descriptions),
+        'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
+        'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
+        'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
+        'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
+        'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
     }, hd_obj)
     task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
     @pseudoshare_received.watch
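None of these call sites pass the two new keyword parameters, so they keep the previously hard-coded behavior (multivalues_keep=20, no squash bucket). A hypothetical variant showing what the new parameters enable (the value 5 and the 'other' key are illustrative, not from this commit):

    # keep only the 5 largest per-miner series and fold the remainder
    # into a single 'other' series (values/names illustrative)
    'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions,
        is_gauge=False, multivalues=True,
        multivalues_keep=5, multivalues_squash_key='other'),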