import argparse
import os
import random
-import struct
import sys
import time
-import json
import signal
import traceback
import urlparse
-from twisted.internet import defer, error, reactor, protocol, task
-from twisted.web import server, resource
+from twisted.internet import defer, reactor, protocol, task
+from twisted.web import server
from twisted.python import log
from nattraverso import portmapper, ipdiscover
import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
-from bitcoin import worker_interface
+from bitcoin import worker_interface, height_tracker
from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
-from . import p2p, networks, graphs
+from . import p2p, networks, web
import p2pool, p2pool.data as p2pool_data
@deferral.retry('Error getting work from bitcoind:', 3)
try:
print 'p2pool (version %s)' % (p2pool.__version__,)
print
- try:
- from . import draw
- except ImportError:
- draw = None
- print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
- print
# connect to bitcoind over JSON-RPC and do initial getmemorypool
url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
))
yield set_real_work1()
- if '\ngetblock ' in (yield deferral.retry()(bitcoind.rpc_help)()):
- height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
- best_height_cached = variable.Variable((yield deferral.retry()(height_cacher)(pre_current_work.value['previous_block'])))
- def get_height_rel_highest(block_hash):
- this_height = height_cacher.call_now(block_hash, 0)
- best_height = height_cacher.call_now(pre_current_work.value['previous_block'], 0)
- best_height_cached.set(max(best_height_cached.value, this_height, best_height))
- return this_height - best_height_cached.value
- else:
- get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory, 5*net.SHARE_PERIOD*net.CHAIN_LENGTH/net.PARENT.BLOCK_PERIOD).get_height_rel_highest
+ get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, pre_current_work, net)
def set_real_work2():
best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
peer.sendShares(shares)
+ @deferral.retry('Error submitting block: (will retry)', 10, 10)
+ @defer.inlineCallbacks
+ def submit_block(block, ignore_failure):
+     # Submit a solved block to bitcoind over JSON-RPC. Calling getmemorypool
+     # with a data argument submits the serialized block (pre-BIP22 interface)
+     # and yields whether bitcoind accepted it.
+     success = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex'))
+     # Acceptance is expected exactly when the header's POW meets the block target.
+     success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target
+     if (not success and success_expected and not ignore_failure) or (success and not success_expected):
+         # Fix: original referenced undefined names 'result'/'expected_result',
+         # raising NameError precisely when the anomaly needed to be reported.
+         print >>sys.stderr, 'Block submittal result: %s Expected: %s' % (success, success_expected)
+
@tracker.verified.added.watch
def _(share):
if share.pow_hash <= share.header['bits'].target:
- if factory.conn.value is not None:
- factory.conn.value.send_block(block=share.as_block(tracker))
- else:
- print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
+ submit_block(share.as_block(tracker), ignore_failure=True)
print
print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
print
)
p2p_node.start()
- def save_addrs():
- open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
- task.LoopingCall(save_addrs).start(60)
+ task.LoopingCall(lambda: open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())).start(60)
# send share when the chain changes to their chain
def work_changed(new_work):
print ' ...success!'
print
- start_time = time.time()
-
- @defer.inlineCallbacks
- def upnp_thread():
- while True:
- try:
- is_lan, lan_ip = yield ipdiscover.get_local_ip()
- if is_lan:
- pm = yield portmapper.get_port_mapper()
- yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
- except defer.TimeoutError:
- pass
- except:
- if p2pool.DEBUG:
- log.err(None, "UPnP error:")
- yield deferral.sleep(random.expovariate(1/120))
-
if args.upnp:
+ @defer.inlineCallbacks
+ def upnp_thread():
+ # Loop forever: when running behind a NAT (local-LAN address detected),
+ # ask the gateway via UPnP to forward the p2pool port to this host.
+ while True:
+ try:
+ is_lan, lan_ip = yield ipdiscover.get_local_ip()
+ if is_lan:
+ pm = yield portmapper.get_port_mapper()
+ yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
+ except defer.TimeoutError:
+ # transient discovery timeout -- silently retry on the next cycle
+ pass
+ except:
+ # best-effort: UPnP failure must not kill the node; log only in debug mode
+ if p2pool.DEBUG:
+ log.err(None, 'UPnP error:')
+ # wait an exponentially-distributed interval (mean ~120s) between attempts
+ yield deferral.sleep(random.expovariate(1/120))
upnp_thread()
# start listening for workers with a JSON-RPC server
return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
+ pseudoshare_received = variable.Event()
local_rate_monitor = math.RateMonitor(10*60)
class WorkerBridge(worker_interface.WorkerBridge):
def preprocess_request(self, request):
user = request.getUser() if request.getUser() is not None else ''
- pubkey_hash = my_pubkey_hash
- max_target = 2**256 - 1
+
+ desired_pseudoshare_target = None
+ if '+' in user:
+ user, desired_pseudoshare_difficulty_str = user.rsplit('+', 1)
+ try:
+ desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(desired_pseudoshare_difficulty_str))
+ except:
+ pass
+
+ desired_share_target = 2**256 - 1
if '/' in user:
user, min_diff_str = user.rsplit('/', 1)
try:
- max_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
+ desired_share_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
except:
pass
- try:
- pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
- except: # XXX blah
- pass
+
if random.uniform(0, 100) < args.worker_fee:
pubkey_hash = my_pubkey_hash
- return pubkey_hash, max_target
+ else:
+ try:
+ pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
+ except: # XXX blah
+ pubkey_hash = my_pubkey_hash
+
+ return pubkey_hash, desired_share_target, desired_pseudoshare_target
- def get_work(self, pubkey_hash, max_target):
+ def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
if len(p2p_node.peers) == 0 and net.PERSIST:
raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
if current_work.value['best_share_hash'] is None and net.PERSIST:
mm_data = ''
mm_later = []
- new = time.time() > net.SWITCH_TIME
-
- if new:
- share_info, generate_tx = p2pool_data.new_generate_transaction(
- tracker=tracker,
- share_data=dict(
- previous_share_hash=current_work.value['best_share_hash'],
- coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
- nonce=random.randrange(2**32),
- pubkey_hash=pubkey_hash,
- subsidy=current_work2.value['subsidy'],
- donation=math.perfect_round(65535*args.donation_percentage/100),
- stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
- 253 if orphans > orphans_recorded_in_chain else
- 254 if doas > doas_recorded_in_chain else
- 0
- )(*get_stale_counts()),
- ),
- block_target=current_work.value['bits'].target,
- desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
- desired_target=max_target,
- net=net,
- )
- else:
- share_info, generate_tx = p2pool_data.generate_transaction(
- tracker=tracker,
- share_data=dict(
- previous_share_hash=current_work.value['best_share_hash'],
- coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
- nonce=struct.pack('<Q', random.randrange(2**64)),
- new_script=bitcoin_data.pubkey_hash_to_script2(pubkey_hash),
- subsidy=current_work2.value['subsidy'],
- donation=math.perfect_round(65535*args.donation_percentage/100),
- stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
- 253 if orphans > orphans_recorded_in_chain else
- 254 if doas > doas_recorded_in_chain else
- 0
- )(*get_stale_counts()),
- ),
- block_target=current_work.value['bits'].target,
- desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
- net=net,
- )
+ share_info, generate_tx = p2pool_data.generate_transaction(
+ tracker=tracker,
+ share_data=dict(
+ previous_share_hash=current_work.value['best_share_hash'],
+ coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
+ nonce=random.randrange(2**32),
+ pubkey_hash=pubkey_hash,
+ subsidy=current_work2.value['subsidy'],
+ donation=math.perfect_round(65535*args.donation_percentage/100),
+ stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
+ 253 if orphans > orphans_recorded_in_chain else
+ 254 if doas > doas_recorded_in_chain else
+ 0
+ )(*get_stale_counts()),
+ ),
+ block_target=current_work.value['bits'].target,
+ desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
+ desired_target=desired_share_target,
+ net=net,
+ )
target = net.PARENT.SANE_MAX_TARGET
- if len(self.recent_shares_ts_work) == 50:
- hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
- target = min(target, 2**256//(hash_rate * 5))
+ if desired_pseudoshare_target is None:
+ if len(self.recent_shares_ts_work) == 50:
+ hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
+ target = min(target, 2**256//hash_rate)
+ else:
+ target = min(target, desired_pseudoshare_target)
target = max(target, share_info['bits'].target)
for aux_work in current_work.value['mm_chains'].itervalues():
target = max(target, aux_work['target'])
try:
if pow_hash <= header['bits'].target or p2pool.DEBUG:
- @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
- def submit_block():
- if factory.conn.value is None:
- print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%32x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
- raise deferral.RetrySilentlyException()
- factory.conn.value.send_block(block=dict(header=header, txs=transactions))
- submit_block()
+ submit_block(dict(header=header, txs=transactions), ignore_failure=False)
if pow_hash <= header['bits'].target:
print
print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
log.err(None, 'Error while processing merged mining POW:')
if pow_hash <= share_info['bits'].target:
- if new:
- min_header = dict(header);del min_header['merkle_root']
- hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.gentx_before_refhash)
- share = p2pool_data.NewShare(net, min_header, share_info, hash_link=hash_link, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
- else:
- share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
+ min_header = dict(header);del min_header['merkle_root']
+ hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.gentx_before_refhash)
+ share = p2pool_data.Share(net, None, min_header, share_info, hash_link=hash_link, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
+
print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
request.getUser(),
p2pool_data.format_hash(share.hash),
except:
log.err(None, 'Error forwarding block solution:')
- if pow_hash <= target and header_hash not in received_header_hashes:
- reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
- if request.getPassword() == vip_pass:
- reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
- self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
- while len(self.recent_shares_ts_work) > 50:
- self.recent_shares_ts_work.pop(0)
- local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
-
- if header_hash in received_header_hashes:
- print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
- received_header_hashes.add(header_hash)
-
if pow_hash > target:
print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
print ' Hash: %56x' % (pow_hash,)
print ' Target: %56x' % (target,)
+ elif header_hash in received_header_hashes:
+ print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
+ else:
+ received_header_hashes.add(header_hash)
+
+ pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, request.getUser() if request.getPassword() == vip_pass else None)
+ self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
+ while len(self.recent_shares_ts_work) > 50:
+ self.recent_shares_ts_work.pop(0)
+ local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
return on_time
return ba, got_response
- web_root = resource.Resource()
- worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
-
- def get_rate():
- if tracker.get_height(current_work.value['best_share_hash']) < 720:
- return json.dumps(None)
- return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
- / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
-
- def get_users():
- height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
- weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
- res = {}
- for script in sorted(weights, key=lambda s: weights[s]):
- res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
- return json.dumps(res)
-
- def get_current_txouts():
- share = tracker.shares[current_work.value['best_share_hash']]
- if isinstance(share, p2pool_data.NewShare):
- share_info, gentx = p2pool_data.new_generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.share_info['bits'].target, share.net)
- else:
- share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
- return dict((out['script'], out['value']) for out in gentx['tx_outs'])
-
- def get_current_scaled_txouts(scale, trunc=0):
- txouts = get_current_txouts()
- total = sum(txouts.itervalues())
- results = dict((script, value*scale//total) for script, value in txouts.iteritems())
- if trunc > 0:
- total_random = 0
- random_set = set()
- for s in sorted(results, key=results.__getitem__):
- if results[s] >= trunc:
- break
- total_random += results[s]
- random_set.add(s)
- if total_random:
- winner = math.weighted_choice((script, results[script]) for script in random_set)
- for script in random_set:
- del results[script]
- results[winner] = total_random
- if sum(results.itervalues()) < int(scale):
- results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
- return results
-
- def get_current_payouts():
- return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
-
- def get_patron_sendmany(this):
- try:
- if '/' in this:
- this, trunc = this.split('/', 1)
- else:
- trunc = '0.01'
- return json.dumps(dict(
- (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
- for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
- if bitcoin_data.script2_to_address(script, net.PARENT) is not None
- ))
- except:
- return json.dumps(None)
-
- def get_global_stats():
- # averaged over last hour
- lookbehind = 3600//net.SHARE_PERIOD
- if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
- return None
-
- nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
- stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
- return json.dumps(dict(
- pool_nonstale_hash_rate=nonstale_hash_rate,
- pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
- pool_stale_prop=stale_prop,
- ))
-
- def get_local_stats():
- lookbehind = 3600//net.SHARE_PERIOD
- if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
- return None
-
- global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
-
- my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
- my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
- my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
- my_share_count = my_unstale_count + my_orphan_count + my_doa_count
- my_stale_count = my_orphan_count + my_doa_count
-
- my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
-
- my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
- for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
- if share.hash in my_share_hashes)
- actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
- tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
- share_att_s = my_work / actual_time
-
- miner_hash_rates = {}
- miner_dead_hash_rates = {}
- datums, dt = local_rate_monitor.get_datums_in_last()
- for datum in datums:
- miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
- if datum['dead']:
- miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
-
- return json.dumps(dict(
- my_hash_rates_in_last_hour=dict(
- note="DEPRECATED",
- nonstale=share_att_s,
- rewarded=share_att_s/(1 - global_stale_prop),
- actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
- ),
- my_share_counts_in_last_hour=dict(
- shares=my_share_count,
- unstale_shares=my_unstale_count,
- stale_shares=my_stale_count,
- orphan_stale_shares=my_orphan_count,
- doa_stale_shares=my_doa_count,
- ),
- my_stale_proportions_in_last_hour=dict(
- stale=my_stale_prop,
- orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
- dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
- ),
- miner_hash_rates=miner_hash_rates,
- miner_dead_hash_rates=miner_dead_hash_rates,
- ))
-
- def get_peer_addresses():
- return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
-
- def get_uptime():
- return json.dumps(time.time() - start_time)
+ get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, current_work.value['best_share_hash'], current_work.value['bits'].target, current_work2.value['subsidy'], net)
- class WebInterface(resource.Resource):
- def __init__(self, func, mime_type, *fields):
- self.func, self.mime_type, self.fields = func, mime_type, fields
-
- def render_GET(self, request):
- request.setHeader('Content-Type', self.mime_type)
- request.setHeader('Access-Control-Allow-Origin', '*')
- return self.func(*(request.args[field][0] for field in self.fields))
-
- web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
- web_root.putChild('users', WebInterface(get_users, 'application/json'))
- web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
- web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
- web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
- web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
- web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
- web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
- web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)), 'application/json'))
- web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
- web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
- if draw is not None:
- web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
-
- new_root = resource.Resource()
- web_root.putChild('web', new_root)
-
- stat_log = []
- if os.path.exists(os.path.join(datadir_path, 'stats')):
- try:
- with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
- stat_log = json.loads(f.read())
- except:
- log.err(None, 'Error loading stats:')
- def update_stat_log():
- while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
- stat_log.pop(0)
-
- lookbehind = 3600//net.SHARE_PERIOD
- if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
- return None
-
- global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
- (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
-
- miner_hash_rates = {}
- miner_dead_hash_rates = {}
- datums, dt = local_rate_monitor.get_datums_in_last()
- for datum in datums:
- miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
- if datum['dead']:
- miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
-
- stat_log.append(dict(
- time=time.time(),
- pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
- pool_stale_prop=global_stale_prop,
- local_hash_rates=miner_hash_rates,
- local_dead_hash_rates=miner_dead_hash_rates,
- shares=shares,
- stale_shares=stale_orphan_shares + stale_doa_shares,
- stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
- current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
- peers=dict(
- incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
- outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
- ),
- attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].max_target),
- attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target),
- block_value=current_work2.value['subsidy']*1e-8,
- ))
-
- with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
- f.write(json.dumps(stat_log))
- task.LoopingCall(update_stat_log).start(5*60)
- new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
+ web_root = web.get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, args.worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received)
+ worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
- grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
- web_root.putChild('graphs', grapher.get_resource())
- def add_point():
- if tracker.get_height(current_work.value['best_share_hash']) < 720:
- return
- nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
- poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
- grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
- task.LoopingCall(add_point).start(100)
+ deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
- def attempt_listen():
- try:
- reactor.listenTCP(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
- except error.CannotListenError, e:
- print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
- reactor.callLater(1, attempt_listen)
- else:
- with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
- pass
- attempt_listen()
+ with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
+ pass
print ' ...success!'
print
if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes and abs(share.timestamp - time.time()) < 10*60:
self.announced_hashes.add(share.header_hash)
message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
- self.delayed_messages[message] = reactor.callLater(random.expovariate(1/5), lambda: (self.say(self.channel, message), self.delayed_messages.pop(message)))
+ self.delayed_messages[message] = reactor.callLater(random.expovariate(1/60), lambda: (self.say(self.channel, message), self.delayed_messages.pop(message)))
def connectionLost(self, reason):
tracker.verified.added.unwatch(self.watch_id)
print 'IRC connection lost:', reason.getErrorMessage()
]:
if getattr(args, var_name) is None and cp.has_option('x', conf_name):
setattr(args, var_name, var_type(cp.get('x', conf_name)))
+ if args.bitcoind_rpc_password is None:
+ parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
if args.bitcoind_rpc_username is None:
args.bitcoind_rpc_username = ''