1 from __future__ import division
14 if '--iocp' in sys.argv:
15 from twisted.internet import iocpreactor
17 from twisted.internet import defer, reactor, protocol, tcp
18 from twisted.web import server
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
22 import bitcoin.p2p as bitcoin_p2p, bitcoin.data as bitcoin_data
23 from bitcoin import stratum, worker_interface, helper
24 from util import fixargparse, jsonrpc, variable, deferral, math, logging, switchprotocol
25 from . import networks, web, work
26 import p2pool, p2pool.data as p2pool_data, p2pool.node as p2pool_node
28 @defer.inlineCallbacks
29 def main(args, net, datadir_path, merged_urls, worker_endpoint):
    # Top-level node-startup coroutine: connects to bitcoind over both its
    # P2P and JSON-RPC interfaces, loads and persists the local share chain,
    # joins the p2pool P2P network, starts the miner/web interface, and kicks
    # off the periodic status, watchdog and (optional) IRC-announce loops.
    #
    # NOTE(review): this is a non-contiguous excerpt -- jumps in the embedded
    # line numbers mark lines missing from view (try/except bodies and some
    # nested `def` headers). Comments below describe only the visible lines.
31 print 'p2pool (version %s)' % (p2pool.__version__,)
    # connect_p2p(): inlineCallbacks helper (its `def` header, original line
    # 35, is not visible here) that opens the bitcoind P2P connection and
    # fires with the connected ClientFactory once the handshake completes.
34 @defer.inlineCallbacks
36 # connect to bitcoind over bitcoin-p2p
37 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
38 factory = bitcoin_p2p.ClientFactory(net.PARENT)
39 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
40 yield factory.getProtocol() # waits until handshake is successful
43 defer.returnValue(factory)
45 if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
46 factory = yield connect_p2p()
48 # connect to bitcoind over JSON-RPC and do initial getmemorypool
49 url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
50 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
    # HTTP Basic auth header is built from the CLI/bitcoin.conf credentials.
51 bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
52 yield helper.check(bitcoind, net)
53 temp_work = yield helper.getwork(bitcoind)
    # bitcoind_warning_var holds the latest non-empty 'errors' string reported
    # by getmininginfo (or None); poll_warnings (header not visible) refreshes
    # it every 20 minutes via the RobustLoopingCall below.
55 bitcoind_warning_var = variable.Variable(None)
56 @defer.inlineCallbacks
58 errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
59 bitcoind_warning_var.set(errors if errors != '' else None)
61 deferral.RobustLoopingCall(poll_warnings).start(20*60)
64 print ' Current block hash: %x' % (temp_work['previous_block'],)
65 print ' Current block height: %i' % (temp_work['height'] - 1,)
    # For non-testnet the P2P connection is established here, after RPC checks.
69 factory = yield connect_p2p()
    # Payout address: if none was given on the command line, reuse the address
    # cached in the datadir, validate it against the local bitcoind, and fall
    # back to asking bitcoind for a fresh 'p2pool' account address.
71 print 'Determining payout address...'
72 if args.pubkey_hash is None:
73 address_path = os.path.join(datadir_path, 'cached_payout_address')
75 if os.path.exists(address_path):
76 with open(address_path, 'rb') as f:
77 address = f.read().strip('\r\n')
78 print ' Loaded cached address: %s...' % (address,)
    # Reject the cached address unless bitcoind says it is both valid and ours.
82 if address is not None:
83 res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
84 if not res['isvalid'] or not res['ismine']:
85 print ' Cached address is either invalid or not controlled by local bitcoind!'
89 print ' Getting payout address from bitcoind...'
90 address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
    # Cache the freshly obtained address for the next start-up.
92 with open(address_path, 'wb') as f:
95 my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
97 my_pubkey_hash = args.pubkey_hash
98 print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
    # Load the on-disk share chain; share_cb (header not visible) is invoked
    # once per stored share and reports progress every 1000 shares.
101 print "Loading shares..."
103 known_verified = set()
105 share.time_seen = 0 # XXX
106 shares[share.hash] = share
107 if len(shares) % 1000 == 0 and shares:
108 print " %i" % (len(shares),)
109 ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net, share_cb, known_verified.add)
110 print " ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
114 print 'Initializing work...'
116 node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net)
    # Prune on-disk shares the tracker rejected, and keep the store in sync
    # with the tracker from now on via the removed-watch callbacks.
119 for share_hash in shares:
120 if share_hash not in node.tracker.items:
121 ss.forget_share(share_hash)
122 for share_hash in known_verified:
123 if share_hash not in node.tracker.verified.items:
124 ss.forget_verified_share(share_hash)
125 node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
126 node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
    # save_shares (header not visible) persists up to 2*CHAIN_LENGTH shares of
    # the current best chain every 60 seconds.
129 for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)):
131 if share.hash in node.tracker.verified.items:
132 ss.add_verified_hash(share.hash)
133 deferral.RobustLoopingCall(save_shares).start(60)
139 print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
    # parse() (header not visible): resolves a 'host[:port]' string to an
    # (ip, port) tuple via the reactor's async resolver.
141 @defer.inlineCallbacks
145 host, port_str = host.split(':')
147 defer.returnValue(((yield reactor.resolve(host)), port))
    # Seed the peer address book from the JSON 'addrs' file if present, then
    # merge in the network's bootstrap addresses with zero-ranked timestamps.
150 if os.path.exists(os.path.join(datadir_path, 'addrs')):
152 with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
153 addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
155 print >>sys.stderr, 'error parsing addrs'
156 for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
159 if addr not in addrs:
160 addrs[addr] = (0, time.time(), time.time())
    # Explicit -n/--p2pool-node peers are always dialed in addition to the
    # address book.
164 connect_addrs = set()
165 for addr_df in map(parse, args.p2pool_nodes):
167 connect_addrs.add((yield addr_df))
    # NOTE(review): some P2PNode keyword arguments (original lines 174, 177)
    # are missing from this excerpt.
171 node.p2p_node = p2pool_node.P2PNode(node,
172 port=args.p2pool_port,
173 max_incoming_conns=args.p2pool_conns,
175 connect_addrs=connect_addrs,
176 desired_outgoing_conns=args.p2pool_outgoing_conns,
178 node.p2p_node.start()
    # save_addrs (header not visible) snapshots the address store to disk as
    # JSON every 60 seconds.
181 with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
182 f.write(json.dumps(node.p2p_node.addr_store.items()))
183 deferral.RobustLoopingCall(save_addrs).start(60)
    # UPnP loop (header not visible): discovers the LAN IP and re-requests a
    # TCP port mapping for the p2pool port, sleeping an exponentially
    # distributed interval (mean 120s) between attempts; timeouts and other
    # failures are logged, not fatal.
189 @defer.inlineCallbacks
193 is_lan, lan_ip = yield ipdiscover.get_local_ip()
195 pm = yield portmapper.get_port_mapper()
196 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
197 except defer.TimeoutError:
201 log.err(None, 'UPnP error:')
202 yield deferral.sleep(random.expovariate(1/120))
205 # start listening for workers with a JSON-RPC server
207 print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
209 wb = work.WorkerBridge(node, my_pubkey_hash, args.donation_percentage, merged_urls, args.worker_fee)
210 web_root = web.get_web_root(wb, datadir_path, bitcoind_warning_var)
211 caching_wb = worker_interface.CachingWorkerBridge(wb)
212 worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
213 web_serverfactory = server.Site(web_root)
    # One listening port serves two protocols: a first byte of '{' (start of a
    # JSON object) routes the connection to the stratum server, anything else
    # falls through to the HTTP site.
216 serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
217 deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0])
    # 'ready_flag' signals to external tooling that start-up completed.
219 with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
227 print 'Started successfully!'
228 print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
229 if args.donation_percentage > 1.1:
230 print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
231 elif args.donation_percentage < .9:
232 print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
234 print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
235 print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
    # Watchdog: arm a 30-second SIGALRM and re-arm it every second; if the
    # reactor stalls long enough for the alarm to fire, the current stack is
    # dumped to stderr. siginterrupt(False) keeps syscalls from being aborted
    # by the signal. Skipped on platforms without SIGALRM (e.g. Windows).
239 if hasattr(signal, 'SIGALRM'):
240 signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
241 sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
243 signal.siginterrupt(signal.SIGALRM, False)
244 deferral.RobustLoopingCall(signal.alarm, 30).start(1)
    # Optional IRC block-found announcer.
246 if args.irc_announce:
247 from twisted.words.protocols import irc
248 class IRCClient(irc.IRCClient):
249 nickname = 'p2pool%02i' % (random.randrange(100),)
250 channel = net.ANNOUNCE_CHANNEL
251 def lineReceived(self, line):
254 irc.IRCClient.lineReceived(self, line)
256 self.in_channel = False
257 irc.IRCClient.signedOn(self)
258 self.factory.resetDelay()
259 self.join(self.channel)
    # new_share: fired for every newly verified share; announces it on the
    # channel only if it is an actual block solution (pow_hash meets the
    # block target) and recent, after a randomized delay (mean 60s) so
    # multiple nodes don't flood simultaneously. Duplicate announcements
    # are suppressed by scanning recent_messages for the block hash.
260 @defer.inlineCallbacks
261 def new_share(share):
262 if not self.in_channel:
264 if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
265 yield deferral.sleep(random.expovariate(1/60))
266 message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
267 if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
268 self.say(self.channel, message)
269 self._remember_message(message)
270 self.watch_id = node.tracker.verified.added.watch(new_share)
271 self.recent_messages = []
272 def joined(self, channel):
273 self.in_channel = True
274 def left(self, channel):
275 self.in_channel = False
    # Bounded (100-entry) memory of recent channel traffic, used for the
    # duplicate-announcement check above.
276 def _remember_message(self, message):
277 self.recent_messages.append(message)
278 while len(self.recent_messages) > 100:
279 self.recent_messages.pop(0)
280 def privmsg(self, user, channel, message):
281 if channel == self.channel:
282 self._remember_message(message)
    # Detach the tracker watch so a dead connection doesn't keep firing.
283 def connectionLost(self, reason):
284 node.tracker.verified.added.unwatch(self.watch_id)
285 print 'IRC connection lost:', reason.getErrorMessage()
286 class IRCClientFactory(protocol.ReconnectingClientFactory):
288 reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
    # Status loop (header not visible): every 3 seconds builds a one-shot
    # status string (chain height, peer counts, local/pool hash rates, stale
    # rates, expected times) and prints it when it changed or 15s elapsed.
    # NOTE(review): several format-argument lines are missing from this
    # excerpt (e.g. original lines 299, 311, 326, 329).
290 @defer.inlineCallbacks
295 yield deferral.sleep(3)
297 height = node.tracker.get_height(node.best_share_var.value)
298 this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
300 len(node.tracker.verified.items),
301 len(node.tracker.items),
302 len(node.p2p_node.peers),
303 sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
304 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
306 datums, dt = wb.local_rate_monitor.get_datums_in_last()
307 my_att_s = sum(datum['work']/dt for datum in datums)
308 my_shares_per_s = sum(datum['work']/dt/bitcoin_data.target_to_average_attempts(datum['share_target']) for datum in datums)
309 this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
310 math.format(int(my_att_s)),
312 math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
313 math.format_dt(1/my_shares_per_s) if my_shares_per_s else '???',
317 (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
    # Pool rate is corrected for the observed stale proportion over the last
    # hour of shares (60*60 // SHARE_PERIOD shares, capped by chain height).
318 stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
319 real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
321 this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
322 shares, stale_orphan_shares, stale_doa_shares,
323 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
324 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
325 node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
327 this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
328 math.format(int(real_att_s)),
330 math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s),
333 for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_warning_var.value, node.bitcoind_work.value):
334 print >>sys.stderr, '#'*40
335 print >>sys.stderr, '>>> Warning: ' + warning
336 print >>sys.stderr, '#'*40
    # gc.garbage is only non-empty when cyclic garbage with __del__ methods
    # could not be collected -- worth shouting about.
339 print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))
341 if this_str != last_str or time.time() > last_time + 15:
344 last_time = time.time()
    # Any exception escaping the start-up sequence is fatal; it is logged
    # here (the surrounding try/except lines are not visible in this excerpt).
350 log.err(None, 'Fatal error:')
# Interior of the CLI entry point (the enclosing `def run():` header,
# original line ~352, is not visible in this excerpt): builds the argument
# parser, resolves configuration (CLI > bitcoin.conf > network defaults),
# wires up logging and error reporting, then schedules main() on the reactor.
# NOTE(review): non-contiguous excerpt -- jumps in the embedded line numbers
# mark lines missing from view.
#
# Old Twisted versions leak connections without abortConnection; warn and
# (on the missing line 356, presumably) pause so the warning is seen.
353 if not hasattr(tcp.Client, 'abortConnection'):
354 print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
355 print 'Pausing for 3 seconds...'
# Only non-testnet networks are offered as --net choices.
358 realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
# fromfile_prefix_chars='@' lets users keep arguments in an @file.
360 parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
361 parser.add_argument('--version', action='version', version=p2pool.__version__)
362 parser.add_argument('--net',
363 help='use specified network (default: bitcoin)',
364 action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
365 parser.add_argument('--testnet',
366 help='''use the network's testnet''',
367 action='store_const', const=True, default=False, dest='testnet')
368 parser.add_argument('--debug',
369 help='enable debugging mode',
370 action='store_const', const=True, default=False, dest='debug')
371 parser.add_argument('-a', '--address',
372 help='generate payouts to this address (default: <address requested from bitcoind>)',
373 type=str, action='store', default=None, dest='address')
374 parser.add_argument('--datadir',
375 help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
376 type=str, action='store', default=None, dest='datadir')
377 parser.add_argument('--logfile',
378 help='''log to this file (default: data/<NET>/log)''',
379 type=str, action='store', default=None, dest='logfile')
# --merged may be given multiple times (action='append').
380 parser.add_argument('--merged',
381 help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
382 type=str, action='append', default=[], dest='merged_urls')
383 parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
384 help='donate this percentage of work towards the development of p2pool (default: 1.0)',
385 type=float, action='store', default=1.0, dest='donation_percentage')
# --iocp is also sniffed from sys.argv before the reactor import at the top
# of the file; this declaration makes it show in --help and parse cleanly.
386 parser.add_argument('--iocp',
387 help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
388 action='store_true', default=False, dest='iocp')
389 parser.add_argument('--irc-announce',
390 help='announce any blocks found on irc://irc.freenode.net/#p2pool',
391 action='store_true', default=False, dest='irc_announce')
392 parser.add_argument('--no-bugreport',
393 help='disable submitting caught exceptions to the author',
394 action='store_true', default=False, dest='no_bugreport')
396 p2pool_group = parser.add_argument_group('p2pool interface')
397 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
398 help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
399 type=int, action='store', default=None, dest='p2pool_port')
400 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
401 help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
402 type=str, action='append', default=[], dest='p2pool_nodes')
403 parser.add_argument('--disable-upnp',
404 help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
405 action='store_false', default=True, dest='upnp')
406 p2pool_group.add_argument('--max-conns', metavar='CONNS',
407 help='maximum incoming connections (default: 40)',
408 type=int, action='store', default=40, dest='p2pool_conns')
409 p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
410 help='outgoing connections (default: 6)',
411 type=int, action='store', default=6, dest='p2pool_outgoing_conns')
413 worker_group = parser.add_argument_group('worker interface')
414 worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
415 help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
416 type=str, action='store', default=None, dest='worker_endpoint')
417 worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
418 help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
419 type=float, action='store', default=0, dest='worker_fee')
421 bitcoind_group = parser.add_argument_group('bitcoind interface')
422 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
423 help='connect to this address (default: 127.0.0.1)',
424 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
# RPC/P2P ports default to None so the bitcoin.conf fallback below can tell
# "not given" apart from an explicit value.
425 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
426 help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
427 type=int, action='store', default=None, dest='bitcoind_rpc_port')
428 bitcoind_group.add_argument('--bitcoind-rpc-ssl',
429 help='connect to JSON-RPC interface using SSL',
430 action='store_true', default=False, dest='bitcoind_rpc_ssl')
431 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
432 help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
433 type=int, action='store', default=None, dest='bitcoind_p2p_port')
# Positional: zero, one or two tokens -- [username] password.
435 bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
436 help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
437 type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
439 args = parser.parse_args()
# Enabled under --debug (the guarding `if`, original line ~441-442, is not
# visible here): makes Deferreds record creation/callback stacks.
443 defer.setDebugging(True)
447 net_name = args.net_name + ('_testnet' if args.testnet else '')
448 net = networks.nets[net_name]
# Per-network data directory, created on first run.
450 datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
451 if not os.path.exists(datadir_path):
452 os.makedirs(datadir_path)
454 if len(args.bitcoind_rpc_userpass) > 2:
455 parser.error('a maximum of two arguments are allowed')
# Right-align the 0-2 positional tokens over (username, password): one token
# means password only (empty username), none means both come from bitcoin.conf.
456 args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
# bitcoin.conf fallback: parse key=value lines (stripping '#' comments) and
# fill in any rpc credentials/ports the user did not supply.
458 if args.bitcoind_rpc_password is None:
459 conf_path = net.PARENT.CONF_FILE_FUNC()
460 if not os.path.exists(conf_path):
461 parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
462 '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
465 '''rpcpassword=%x\r\n'''
467 '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
468 conf = open(conf_path, 'rb').read()
470 for line in conf.splitlines(True):
472 line = line[:line.index('#')]
475 k, v = line.split('=', 1)
476 contents[k.strip()] = v.strip()
477 for conf_name, var_name, var_type in [
478 ('rpcuser', 'bitcoind_rpc_username', str),
479 ('rpcpassword', 'bitcoind_rpc_password', str),
480 ('rpcport', 'bitcoind_rpc_port', int),
481 ('port', 'bitcoind_p2p_port', int),
483 if getattr(args, var_name) is None and conf_name in contents:
484 setattr(args, var_name, var_type(contents[conf_name]))
485 if args.bitcoind_rpc_password is None:
486 parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
488 if args.bitcoind_rpc_username is None:
489 args.bitcoind_rpc_username = ''
# Anything still unset falls back to the network's compiled-in defaults.
491 if args.bitcoind_rpc_port is None:
492 args.bitcoind_rpc_port = net.PARENT.RPC_PORT
494 if args.bitcoind_p2p_port is None:
495 args.bitcoind_p2p_port = net.PARENT.P2P_PORT
497 if args.p2pool_port is None:
498 args.p2pool_port = net.P2P_PORT
500 if args.p2pool_outgoing_conns > 10:
501 parser.error('''--outgoing-conns can't be more than 10''')
# Normalize -w into an (interface, port) pair; bare PORT binds all interfaces.
503 if args.worker_endpoint is None:
504 worker_endpoint = '', net.WORKER_PORT
505 elif ':' not in args.worker_endpoint:
506 worker_endpoint = '', int(args.worker_endpoint)
508 addr, port = args.worker_endpoint.rsplit(':', 1)
509 worker_endpoint = addr, int(port)
# Pre-validate -a/--address here so a typo fails fast; main() receives the
# decoded pubkey hash (or None, meaning "ask bitcoind").
511 if args.address is not None:
513 args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
515 parser.error('error parsing address: ' + repr(e))
517 args.pubkey_hash = None
# Split each merged-mining URL into (credential-free URL, 'user:pass').
519 def separate_url(url):
520 s = urlparse.urlsplit(url)
521 if '@' not in s.netloc:
522 parser.error('merged url netloc must contain an "@"')
523 userpass, new_netloc = s.netloc.rsplit('@', 1)
524 return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
525 merged_urls = map(separate_url, args.merged_urls)
527 if args.logfile is None:
528 args.logfile = os.path.join(datadir_path, 'log')
# Tee timestamped output to both stderr and the logfile, and capture
# stdout/stderr (and Twisted's log observer) through the same pipe.
530 logfile = logging.LogFile(args.logfile)
531 pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
532 sys.stdout = logging.AbortPipe(pipe)
533 sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
# SIGUSR1 reopens the logfile (for logrotate); it is also reopened every 5s
# as a belt-and-braces fallback on platforms without the signal.
534 if hasattr(signal, "SIGUSR1"):
535 def sigusr1(signum, frame):
536 print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
538 print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
539 signal.signal(signal.SIGUSR1, sigusr1)
540 deferral.RobustLoopingCall(logfile.reopen).start(5)
# Twisted log observer that POSTs error events (traceback or message text)
# to the author's collection endpoint, rate-limited to one report per 5s.
# Installed below only when --no-bugreport was not given.
542 class ErrorReporter(object):
544 self.last_sent = None
546 def emit(self, eventDict):
547 if not eventDict["isError"]:
550 if self.last_sent is not None and time.time() < self.last_sent + 5:
552 self.last_sent = time.time()
554 if 'failure' in eventDict:
555 text = ((eventDict.get('why') or 'Unhandled Error')
556 + '\n' + eventDict['failure'].getTraceback())
558 text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
560 from twisted.web import client
562 url='http://u.forre.st/p2pool_error.cgi',
564 postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
# .addBoth(lambda x: None) swallows the POST's outcome so a failed bug
# report can never itself generate another error event.
566 ).addBoth(lambda x: None)
567 if not args.no_bugreport:
568 log.addObserver(ErrorReporter().emit)
# Defer main() until the reactor is running; reactor.run() itself is on a
# missing line below this excerpt.
570 reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)