from __future__ import division
import random
+import tempfile
from twisted.internet import defer, reactor
from twisted.trial import unittest
-from twisted.web import resource, server
+from twisted.web import client, resource, server
from p2pool import data, node, work
from p2pool.bitcoin import data as bitcoin_data, networks, worker_interface
self.n.p2p_node.start()
wb = work.WorkerBridge(node=self.n, my_pubkey_hash=random.randrange(2**160), donation_percentage=random.uniform(0, 10), merged_urls=merged_urls, worker_fee=3)
+ self.wb = wb
web_root = resource.Resource()
worker_interface.WorkerInterface(wb).attach_to(web_root)
self.web_port = reactor.listenTCP(0, server.Site(web_root))
blah = yield proxy.rpc_getwork()
yield proxy.rpc_getwork(blah['data'])
+
yield deferral.sleep(3)
assert len(n.tracker.items) == 100
proxy = jsonrpc.Proxy('http://127.0.0.1:' + str(random.choice(nodes).web_port.getHost().port))
blah = yield proxy.rpc_getwork()
yield proxy.rpc_getwork(blah['data'])
- yield deferral.sleep(random.expovariate(1/.1))
+ yield deferral.sleep(.02)
print i
print type(nodes[0].n.tracker.items[nodes[0].n.best_share_var.value])
-
+
+ # crawl web pages
+ from p2pool import web
+ stop_event = variable.Event()
+ web2_root = web.get_web_root(nodes[0].wb, tempfile.mkdtemp(), variable.Variable(None), stop_event)
+ web2_port = reactor.listenTCP(0, server.Site(web2_root))
+ for name in web2_root.listNames() + ['web/' + x for x in web2_root.getChildWithDefault('web', None).listNames()]:
+ print
+ print name
+ try:
+ res = yield client.getPage('http://127.0.0.1:%i/%s' % (web2_port.getHost().port, name))
+            except Exception:  # narrow from bare except: don't swallow GeneratorExit/KeyboardInterrupt
+ import traceback
+ traceback.print_exc()
+ else:
+ print repr(res)[:100]
+ print
+ yield web2_port.stopListening()
+ stop_event.happened()
+ del web2_root
+
yield deferral.sleep(3)
for i, n in enumerate(nodes):
import p2pool
from bitcoin import data as bitcoin_data
from . import data as p2pool_data
-from util import deferred_resource, graph, math, pack
+from util import deferred_resource, graph, math, pack, variable
def _atomic_read(filename):
try:
os.remove(filename)
os.rename(filename + '.new', filename)
-def get_web_root(wb, datadir_path, bitcoind_warning_var):
+def get_web_root(wb, datadir_path, bitcoind_warning_var, stop_event=None):
+    # None-sentinel instead of stop_event=variable.Event(): a def-time default
+    # would be a single Event object shared by every call of get_web_root
+    if stop_event is None:
+        stop_event = variable.Event()
node = wb.node
start_time = time.time()
res = yield self.func(*self.args)
defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res)
- web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, 720)/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, 720))))
+ def decent_height():
+ return min(node.tracker.get_height(node.best_share_var.value), 720)
+ web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height())/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, decent_height()))))
web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target)))
web_root.putChild('users', WebInterface(get_users))
web_root.putChild('user_stales', WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in
share='%064x' % s.hash,
) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target]))
web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
- web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, 720, rates=True)))
+ web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True)))
new_root = resource.Resource()
web_root.putChild('web', new_root)
with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
f.write(json.dumps(stat_log))
- task.LoopingCall(update_stat_log).start(5*60)
+ x = task.LoopingCall(update_stat_log)
+ x.start(5*60)
+ stop_event.watch(x.stop)
new_root.putChild('log', WebInterface(lambda: stat_log))
def get_share(share_hash_str):
'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
'getwork_latency': graph.DataStreamDescription(dataview_descriptions),
}, hd_obj)
- task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100)
+ x = task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
+ x.start(100)
+ stop_event.watch(x.stop)
@wb.pseudoshare_received.watch
def _(work, dead, user):
t = time.time()
vs_total = sum(vs.itervalues())
hd.datastreams['desired_versions'].add_datum(t, dict((str(k), v/vs_total) for k, v in vs.iteritems()))
hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
- task.LoopingCall(add_point).start(5)
+ x = task.LoopingCall(add_point)
+ x.start(5)
+ stop_event.watch(x.stop)
@node.bitcoind_work.changed.watch
def _(new_work):
hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency'])