X-Git-Url: https://git.novaco.in/?a=blobdiff_plain;f=p2pool%2Fmain.py;h=fdd3d6566c2b554913c906cc5e949e5d1dc7cb43;hb=60518a079cd431d758ce646e1953d016d48ca2df;hp=4f16daa65d06fa08c2782fe2d819af54dfaf1cce;hpb=b1fe41470326748c4b47df5bce076b444c189953;p=p2pool.git

diff --git a/p2pool/main.py b/p2pool/main.py
index 4f16daa..fdd3d65 100644
--- a/p2pool/main.py
+++ b/p2pool/main.py
@@ -1,8 +1,7 @@
 from __future__ import division
 
-import ConfigParser
-import StringIO
 import base64
+import gc
 import json
 import os
 import random
@@ -15,52 +14,16 @@ import urlparse
 if '--iocp' in sys.argv:
     from twisted.internet import iocpreactor
     iocpreactor.install()
-from twisted.internet import defer, reactor, protocol, task
+from twisted.internet import defer, reactor, protocol, tcp
 from twisted.web import server
 from twisted.python import log
 from nattraverso import portmapper, ipdiscover
 
 import bitcoin.p2p as bitcoin_p2p, bitcoin.data as bitcoin_data
-from bitcoin import worker_interface, height_tracker
-from util import fixargparse, jsonrpc, variable, deferral, math, logging
-from . import p2p, networks, web, work
-import p2pool, p2pool.data as p2pool_data
-
-@deferral.retry('Error getting work from bitcoind:', 3)
-@defer.inlineCallbacks
-def getwork(bitcoind, use_getblocktemplate=False):
-    def go():
-        if use_getblocktemplate:
-            return bitcoind.rpc_getblocktemplate({})
-        else:
-            return bitcoind.rpc_getmemorypool()
-    try:
-        work = yield go()
-    except jsonrpc.Error_for_code(-32601): # Method not found
-        use_getblocktemplate = not use_getblocktemplate
-        try:
-            work = yield go()
-        except jsonrpc.Error_for_code(-32601): # Method not found
-            print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
-            raise deferral.RetrySilentlyException()
-    packed_transactions = [(x['data'] if isinstance(x, dict) else x).decode('hex') for x in work['transactions']]
-    if 'height' not in work:
-        work['height'] = (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1
-    elif p2pool.DEBUG:
-        assert work['height'] == (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1
-    defer.returnValue(dict(
-        version=work['version'],
-        previous_block=int(work['previousblockhash'], 16),
-        transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
-        merkle_link=bitcoin_data.calculate_merkle_link([None] + map(bitcoin_data.hash256, packed_transactions), 0),
-        subsidy=work['coinbasevalue'],
-        time=work['time'] if 'time' in work else work['curtime'],
-        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
-        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
-        height=work['height'],
-        last_update=time.time(),
-        use_getblocktemplate=use_getblocktemplate,
-    ))
+from bitcoin import stratum, worker_interface, helper
+from util import fixargparse, jsonrpc, variable, deferral, math, logging, switchprotocol
+from . import networks, web, work
+import p2pool, p2pool.data as p2pool_data, p2pool.node as p2pool_node
 
 @defer.inlineCallbacks
 def main(args, net, datadir_path, merged_urls, worker_endpoint):
@@ -68,29 +31,30 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         print 'p2pool (version %s)' % (p2pool.__version__,)
         print
 
+        @defer.inlineCallbacks
+        def connect_p2p():
+            # connect to bitcoind over bitcoin-p2p
+            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
+            factory = bitcoin_p2p.ClientFactory(net.PARENT)
+            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
+            def long():
+                print '''    ...taking a while. Common reasons for this include all of bitcoind's connection slots being used...'''
+            long_dc = reactor.callLater(5, long)
+            yield factory.getProtocol() # waits until handshake is successful
+            if not long_dc.called: long_dc.cancel()
+            print '    ...success!'
+            print
+            defer.returnValue(factory)
+
+        if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
+            factory = yield connect_p2p()
+
         # connect to bitcoind over JSON-RPC and do initial getmemorypool
-        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
+        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
-        bitcoind = jsonrpc.Proxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
-        @deferral.retry('Error while checking Bitcoin connection:', 1)
-        @defer.inlineCallbacks
-        def check():
-            if not (yield net.PARENT.RPC_CHECK(bitcoind)):
-                print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
-                raise deferral.RetrySilentlyException()
-            temp_work = yield getwork(bitcoind)
-            if not net.VERSION_CHECK((yield bitcoind.rpc_getinfo())['version'], temp_work):
-                print >>sys.stderr, '    Bitcoin version too old! BIP16 support required! Upgrade to 0.6.0rc4 or greater!'
-                raise deferral.RetrySilentlyException()
-            defer.returnValue(temp_work)
-        temp_work = yield check()
-
-        block_height_var = variable.Variable(None)
-        @defer.inlineCallbacks
-        def poll_height():
-            block_height_var.set((yield deferral.retry('Error while calling getblockcount:')(bitcoind.rpc_getblockcount)()))
-        yield poll_height()
-        task.LoopingCall(poll_height).start(60*60)
+        bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
+        yield helper.check(bitcoind, net)
+        temp_work = yield helper.getwork(bitcoind)
 
         bitcoind_warning_var = variable.Variable(None)
         @defer.inlineCallbacks
@@ -98,20 +62,15 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
             errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
             bitcoind_warning_var.set(errors if errors != '' else None)
         yield poll_warnings()
-        task.LoopingCall(poll_warnings).start(20*60)
+        deferral.RobustLoopingCall(poll_warnings).start(20*60)
 
         print '    ...success!'
         print '    Current block hash: %x' % (temp_work['previous_block'],)
-        print '    Current block height: %i' % (block_height_var.value,)
+        print '    Current block height: %i' % (temp_work['height'] - 1,)
         print
 
-        # connect to bitcoind over bitcoin-p2p
-        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
-        factory = bitcoin_p2p.ClientFactory(net.PARENT)
-        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
-        yield factory.getProtocol() # waits until handshake is successful
-        print '    ...success!'
-        print
+        if not args.testnet:
+            factory = yield connect_p2p()
 
         print 'Determining payout address...'
         if args.pubkey_hash is None:
@@ -143,200 +102,53 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
         print
 
-        my_share_hashes = set()
-        my_doa_share_hashes = set()
-
-        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
-        shared_share_hashes = set()
-        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
-        known_verified = set()
         print "Loading shares..."
-        for i, (mode, contents) in enumerate(ss.get_shares()):
-            if mode == 'share':
-                if contents.hash in tracker.items:
-                    continue
-                shared_share_hashes.add(contents.hash)
-                contents.time_seen = 0
-                tracker.add(contents)
-                if len(tracker.items) % 1000 == 0 and tracker.items:
-                    print "    %i" % (len(tracker.items),)
-            elif mode == 'verified_hash':
-                known_verified.add(contents)
-            else:
-                raise AssertionError()
-        print "    ...inserting %i verified shares..." % (len(known_verified),)
-        for h in known_verified:
-            if h not in tracker.items:
-                ss.forget_verified_share(h)
-                continue
-            tracker.verified.add(tracker.items[h])
-        print "    ...done loading %i shares!" % (len(tracker.items),)
+        shares = {}
+        known_verified = set()
+        def share_cb(share):
+            share.time_seen = 0 # XXX
+            shares[share.hash] = share
+            if len(shares) % 1000 == 0 and shares:
+                print "    %i" % (len(shares),)
+        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net, share_cb, known_verified.add)
+        print "    ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
         print
 
-        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
-        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
-        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
-
-        print 'Initializing work...'
-
-
-        # BITCOIND WORK
-
-        bitcoind_work = variable.Variable((yield getwork(bitcoind)))
-        @defer.inlineCallbacks
-        def work_poller():
-            while True:
-                flag = factory.new_block.get_deferred()
-                try:
-                    bitcoind_work.set((yield getwork(bitcoind, bitcoind_work.value['use_getblocktemplate'])))
-                except:
-                    log.err()
-                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
-        work_poller()
-        # PEER WORK
-        best_block_header = variable.Variable(None)
-        def handle_header(new_header):
-            # check that header matches current target
-            if not (net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header)) <= bitcoind_work.value['bits'].target):
-                return
-            bitcoind_best_block = bitcoind_work.value['previous_block']
-            if (best_block_header.value is None
-                or (
-                    new_header['previous_block'] == bitcoind_best_block and
-                    bitcoin_data.hash256(bitcoin_data.block_header_type.pack(best_block_header.value)) == bitcoind_best_block
-                ) # new is child of current and previous is current
-                or (
-                    bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and
-                    best_block_header.value['previous_block'] != bitcoind_best_block
-                )): # new is current and previous is not a child of current
-                best_block_header.set(new_header)
-        @defer.inlineCallbacks
-        def poll_header():
-            handle_header((yield factory.conn.value.get_block_header(bitcoind_work.value['previous_block'])))
-        bitcoind_work.changed.watch(lambda _: poll_header())
-        yield deferral.retry('Error while requesting best block header:')(poll_header)()
+        print 'Initializing work...'
 
-        # BEST SHARE
+        node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net)
+        yield node.start()
 
-        get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, lambda: bitcoind_work.value['previous_block'], net)
+        for share_hash in shares:
+            if share_hash not in node.tracker.items:
+                ss.forget_share(share_hash)
+        for share_hash in known_verified:
+            if share_hash not in node.tracker.verified.items:
+                ss.forget_verified_share(share_hash)
+        node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
+        node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
 
-        best_share_var = variable.Variable(None)
-        desired_var = variable.Variable(None)
-        def set_best_share():
-            best, desired = tracker.think(get_height_rel_highest, bitcoind_work.value['previous_block'], bitcoind_work.value['bits'])
-
-            best_share_var.set(best)
-            desired_var.set(desired)
-        bitcoind_work.changed.watch(lambda _: set_best_share())
-        set_best_share()
+        def save_shares():
+            for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)):
+                ss.add_share(share)
+                if share.hash in node.tracker.verified.items:
+                    ss.add_verified_hash(share.hash)
+        deferral.RobustLoopingCall(save_shares).start(60)
 
         print '    ...success!'
         print
 
-        # setup p2p logic and join p2pool network
-
-        class Node(p2p.Node):
-            def handle_shares(self, shares, peer):
-                if len(shares) > 5:
-                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
-
-                new_count = 0
-                for share in shares:
-                    if share.hash in tracker.items:
-                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
-                        continue
-
-                    new_count += 1
-
-                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
-
-                    tracker.add(share)
-
-                if new_count:
-                    set_best_share()
-
-                if len(shares) > 5:
-                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.items), 2*net.CHAIN_LENGTH)
-
-            @defer.inlineCallbacks
-            def handle_share_hashes(self, hashes, peer):
-                new_hashes = [x for x in hashes if x not in tracker.items]
-                if not new_hashes:
-                    return
-                try:
-                    shares = yield peer.get_shares(
-                        hashes=new_hashes,
-                        parents=0,
-                        stops=[],
-                    )
-                except:
-                    log.err(None, 'in handle_share_hashes:')
-                else:
-                    self.handle_shares(shares, peer)
-
-            def handle_get_shares(self, hashes, parents, stops, peer):
-                parents = min(parents, 1000//len(hashes))
-                stops = set(stops)
-                shares = []
-                for share_hash in hashes:
-                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
-                        if share.hash in stops:
-                            break
-                        shares.append(share)
-                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
-                return shares
-
-            def handle_bestblock(self, header, peer):
-                if net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) > header['bits'].target:
-                    raise p2p.PeerMisbehavingError('received block header fails PoW test')
-                handle_header(header)
-
-        @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
-        def submit_block_p2p(block):
-            if factory.conn.value is None:
-                print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%32x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header'])))
-                raise deferral.RetrySilentlyException()
-            factory.conn.value.send_block(block=block)
-
-        @deferral.retry('Error submitting block: (will retry)', 10, 10)
-        @defer.inlineCallbacks
-        def submit_block_rpc(block, ignore_failure):
-            if bitcoind_work.value['use_getblocktemplate']:
-                success = yield bitcoind.rpc_getblocktemplate(dict(data=bitcoin_data.block_type.pack(block).encode('hex')))
-            else:
-                success = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex'))
-            success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target
-            if (not success and success_expected and not ignore_failure) or (success and not success_expected):
-                print >>sys.stderr, 'Block submittal result: %s Expected: %s' % (success, success_expected)
-
-        def submit_block(block, ignore_failure):
-            submit_block_p2p(block)
-            submit_block_rpc(block, ignore_failure)
-
-        @tracker.verified.added.watch
-        def _(share):
-            if share.pow_hash <= share.header['bits'].target:
-                submit_block(share.as_block(tracker), ignore_failure=True)
-                print
-                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
-                print
-                def spread():
-                    if (get_height_rel_highest(share.header['previous_block']) > -5 or
-                        bitcoind_work.value['previous_block'] in [share.header['previous_block'], share.header_hash]):
-                        broadcast_share(share.hash)
-                spread()
-                reactor.callLater(5, spread) # so get_height_rel_highest can update

         print 'Joining p2pool network using port %i...' % (args.p2pool_port,)

         @defer.inlineCallbacks
-        def parse(x):
-            if ':' in x:
-                ip, port = x.split(':')
-                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
-            else:
-                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
+        def parse(host):
+            port = net.P2P_PORT
+            if ':' in host:
+                host, port_str = host.split(':')
+                port = int(port_str)
+            defer.returnValue(((yield reactor.resolve(host)), port))

         addrs = {}
         if os.path.exists(os.path.join(datadir_path, 'addrs')):
@@ -360,71 +172,19 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
             except:
                 log.err()

-        p2p_node = Node(
-            best_share_hash_func=lambda: best_share_var.value,
+        node.p2p_node = p2pool_node.P2PNode(node,
             port=args.p2pool_port,
-            net=net,
+            max_incoming_conns=args.p2pool_conns,
             addr_store=addrs,
             connect_addrs=connect_addrs,
-            max_incoming_conns=args.p2pool_conns,
+            desired_outgoing_conns=args.p2pool_outgoing_conns,
         )
-        p2p_node.start()
+        node.p2p_node.start()

         def save_addrs():
             with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
-                f.write(json.dumps(p2p_node.addr_store.items()))
-        task.LoopingCall(save_addrs).start(60)
-
-        @best_block_header.changed.watch
-        def _(header):
-            for peer in p2p_node.peers.itervalues():
-                peer.send_bestblock(header=header)
-
-        @defer.inlineCallbacks
-        def broadcast_share(share_hash):
-            shares = []
-            for share in tracker.get_chain(share_hash, min(5, tracker.get_height(share_hash))):
-                if share.hash in shared_share_hashes:
-                    break
-                shared_share_hashes.add(share.hash)
-                shares.append(share)
-
-            for peer in list(p2p_node.peers.itervalues()):
-                yield peer.sendShares([share for share in shares if share.peer is not peer])
-
-        # send share when the chain changes to their chain
-        best_share_var.changed.watch(broadcast_share)
-
-        def save_shares():
-            for share in tracker.get_chain(best_share_var.value, min(tracker.get_height(best_share_var.value), 2*net.CHAIN_LENGTH)):
-                ss.add_share(share)
-                if share.hash in tracker.verified.items:
-                    ss.add_verified_hash(share.hash)
-        task.LoopingCall(save_shares).start(60)
-
-        @apply
-        @defer.inlineCallbacks
-        def download_shares():
-            while True:
-                desired = yield desired_var.get_when_satisfies(lambda val: len(val) != 0)
-                peer2, share_hash = random.choice(desired)
-
-                if len(p2p_node.peers) == 0:
-                    yield deferral.sleep(1)
-                    continue
-                peer = random.choice(p2p_node.peers.values())
-
-                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
-                try:
-                    shares = yield peer.get_shares(
-                        hashes=[share_hash],
-                        parents=500,
-                        stops=[],
-                    )
-                except:
-                    log.err(None, 'in download_shares:')
-                else:
-                    p2p_node.handle_shares(shares, peer)
+                f.write(json.dumps(node.p2p_node.addr_store.items()))
+        deferral.RobustLoopingCall(save_addrs).start(60)

         print '    ...success!'
         print
@@ -450,13 +210,15 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):


         print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])

-        get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, best_share_var.value, bitcoind_work.value['bits'].target, bitcoind_work.value['subsidy'], net)
+        wb = work.WorkerBridge(node, my_pubkey_hash, args.donation_percentage, merged_urls, args.worker_fee)
+        web_root = web.get_web_root(wb, datadir_path, bitcoind_warning_var)
+        caching_wb = worker_interface.CachingWorkerBridge(wb)
+        worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('static/'))
+        web_serverfactory = server.Site(web_root)

-        wb = work.WorkerBridge(my_pubkey_hash, net, args.donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, args.worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var)
-        web_root = web.get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net, wb.get_stale_counts, my_pubkey_hash, wb.local_rate_monitor, args.worker_fee, p2p_node, wb.my_share_hashes, wb.pseudoshare_received, wb.share_received, best_share_var, bitcoind_warning_var)
-        worker_interface.WorkerInterface(wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
-        deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
+        serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
+        deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0])

         with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
             pass
@@ -468,9 +230,9 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         # done!
         print 'Started successfully!'
         print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
-        if args.donation_percentage > 0.51:
+        if args.donation_percentage > 1.1:
             print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
-        elif args.donation_percentage < 0.49:
+        elif args.donation_percentage < .9:
             print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
         else:
             print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
@@ -483,7 +245,7 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                 sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
             ))
             signal.siginterrupt(signal.SIGALRM, False)
-            task.LoopingCall(signal.alarm, 30).start(1)
+            deferral.RobustLoopingCall(signal.alarm, 30).start(1)

         if args.irc_announce:
             from twisted.words.protocols import irc
@@ -495,19 +257,26 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                         print repr(line)
                     irc.IRCClient.lineReceived(self, line)
                 def signedOn(self):
+                    self.in_channel = False
                     irc.IRCClient.signedOn(self)
                     self.factory.resetDelay()
                     self.join(self.channel)
                     @defer.inlineCallbacks
                     def new_share(share):
+                        if not self.in_channel:
+                            return
                         if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
                             yield deferral.sleep(random.expovariate(1/60))
                             message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                             if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
                                 self.say(self.channel, message)
                                 self._remember_message(message)
-                    self.watch_id = tracker.verified.added.watch(new_share)
+                    self.watch_id = node.tracker.verified.added.watch(new_share)
                     self.recent_messages = []
+                def joined(self, channel):
+                    self.in_channel = True
+                def left(self, channel):
+                    self.in_channel = False
                 def _remember_message(self, message):
                     self.recent_messages.append(message)
                     while len(self.recent_messages) > 100:
@@ -516,7 +285,7 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                     if channel == self.channel:
                         self._remember_message(message)
                 def connectionLost(self, reason):
-                    tracker.verified.added.unwatch(self.watch_id)
+                    node.tracker.verified.added.unwatch(self.watch_id)
                     print 'IRC connection lost:', reason.getErrorMessage()
             class IRCClientFactory(protocol.ReconnectingClientFactory):
                 protocol = IRCClient
@@ -529,45 +298,49 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
             while True:
                 yield deferral.sleep(3)
                 try:
-                    height = tracker.get_height(best_share_var.value)
+                    height = node.tracker.get_height(node.best_share_var.value)
                     this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                         height,
-                        len(tracker.verified.items),
-                        len(tracker.items),
-                        len(p2p_node.peers),
-                        sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
+                        len(node.tracker.verified.items),
+                        len(node.tracker.items),
+                        len(node.p2p_node.peers),
+                        sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                     ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')

                     datums, dt = wb.local_rate_monitor.get_datums_in_last()
                     my_att_s = sum(datum['work']/dt for datum in datums)
+                    my_shares_per_s = sum(datum['work']/dt/bitcoin_data.target_to_average_attempts(datum['share_target']) for datum in datums)
                     this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                         math.format(int(my_att_s)),
                         math.format_dt(dt),
                         math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
-                        math.format_dt(2**256 / tracker.items[best_share_var.value].max_target / my_att_s) if my_att_s and best_share_var.value else '???',
+                        math.format_dt(1/my_shares_per_s) if my_shares_per_s else '???',
                     )

                     if height > 2:
                         (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
-                        stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
-                        real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
+                        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
+                        real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)

                         this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                             shares, stale_orphan_shares, stale_doa_shares,
                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
-                            get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
+                            node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
                         )
                         this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                             math.format(int(real_att_s)),
                             100*stale_prop,
-                            math.format_dt(2**256 / bitcoind_work.value['bits'].target / real_att_s),
+                            math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s),
                         )

-                        for warning in p2pool_data.get_warnings(tracker, best_share_var.value, net, bitcoind_warning_var.value, bitcoind_work.value):
+                        for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_warning_var.value, node.bitcoind_work.value):
                             print >>sys.stderr, '#'*40
                             print >>sys.stderr, '>>> Warning: ' + warning
                             print >>sys.stderr, '#'*40
+
+                    if gc.garbage:
+                        print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))

                     if this_str != last_str or time.time() > last_time + 15:
                         print this_str
@@ -581,6 +354,11 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         log.err(None, 'Fatal error:')

 def run():
+    if not hasattr(tcp.Client, 'abortConnection'):
+        print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
+        print 'Pausing for 3 seconds...'
+        time.sleep(3)
+
     realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)

     parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
@@ -607,8 +385,8 @@ def run():
         help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
         type=str, action='append', default=[], dest='merged_urls')
     parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
-        help='donate this percentage of work towards the development of p2pool (default: 0.5)',
-        type=float, action='store', default=0.5, dest='donation_percentage')
+        help='donate this percentage of work towards the development of p2pool (default: 1.0)',
+        type=float, action='store', default=1.0, dest='donation_percentage')
     parser.add_argument('--iocp',
         help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
         action='store_true', default=False, dest='iocp')
@@ -632,6 +410,9 @@ def run():
     p2pool_group.add_argument('--max-conns', metavar='CONNS',
         help='maximum incoming connections (default: 40)',
         type=int, action='store', default=40, dest='p2pool_conns')
+    p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
+        help='outgoing connections (default: 6)',
+        type=int, action='store', default=6, dest='p2pool_outgoing_conns')

     worker_group = parser.add_argument_group('worker interface')
     worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
@@ -648,6 +429,9 @@ def run():
     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
         help='''connect to JSON-RPC interface at this port (default: %s )''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
         type=int, action='store', default=None, dest='bitcoind_rpc_port')
+    bitcoind_group.add_argument('--bitcoind-rpc-ssl',
+        help='connect to JSON-RPC interface using SSL',
+        action='store_true', default=False, dest='bitcoind_rpc_ssl')
     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
         help='''connect to P2P interface at this port (default: %s )''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
         type=int, action='store', default=None, dest='bitcoind_p2p_port')
@@ -661,6 +445,8 @@ def run():
     if args.debug:
         p2pool.DEBUG = True
         defer.setDebugging(True)
+    else:
+        p2pool.DEBUG = False

     net_name = args.net_name + ('_testnet' if args.testnet else '')
     net = networks.nets[net_name]
@@ -683,17 +469,23 @@ def run():
                 '''rpcpassword=%x\r\n'''
                 '''\r\n'''
                 '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
-        with open(conf_path, 'rb') as f:
-            cp = ConfigParser.RawConfigParser()
-            cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
-            for conf_name, var_name, var_type in [
-                ('rpcuser', 'bitcoind_rpc_username', str),
-                ('rpcpassword', 'bitcoind_rpc_password', str),
-                ('rpcport', 'bitcoind_rpc_port', int),
-                ('port', 'bitcoind_p2p_port', int),
-            ]:
-                if getattr(args, var_name) is None and cp.has_option('x', conf_name):
-                    setattr(args, var_name, var_type(cp.get('x', conf_name)))
+        conf = open(conf_path, 'rb').read()
+        contents = {}
+        for line in conf.splitlines(True):
+            if '#' in line:
+                line = line[:line.index('#')]
+            if '=' not in line:
+                continue
+            k, v = line.split('=', 1)
+            contents[k.strip()] = v.strip()
+        for conf_name, var_name, var_type in [
+            ('rpcuser', 'bitcoind_rpc_username', str),
+            ('rpcpassword', 'bitcoind_rpc_password', str),
+            ('rpcport', 'bitcoind_rpc_port', int),
+            ('port', 'bitcoind_p2p_port', int),
+        ]:
+            if getattr(args, var_name) is None and conf_name in contents:
+                setattr(args, var_name, var_type(contents[conf_name]))
         if args.bitcoind_rpc_password is None:
             parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')

@@ -709,6 +501,9 @@ def run():
     if args.p2pool_port is None:
         args.p2pool_port = net.P2P_PORT

+    if args.p2pool_outgoing_conns > 10:
+        parser.error('''--outgoing-conns can't be more than 10''')
+
     if args.worker_endpoint is None:
         worker_endpoint = '', net.WORKER_PORT
     elif ':' not in args.worker_endpoint:
@@ -746,7 +541,7 @@ def run():
             logfile.reopen()
             print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
         signal.signal(signal.SIGUSR1, sigusr1)
-    task.LoopingCall(logfile.reopen).start(5)
+    deferral.RobustLoopingCall(logfile.reopen).start(5)

     class ErrorReporter(object):
         def __init__(self):