1 from __future__ import division
14 if '--iocp' in sys.argv:
15 from twisted.internet import iocpreactor
17 from twisted.internet import defer, reactor, protocol, task, tcp
18 from twisted.web import server
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
22 import bitcoin.p2p as bitcoin_p2p, bitcoin.data as bitcoin_data
23 from bitcoin import stratum, worker_interface, helper
24 from util import fixargparse, jsonrpc, variable, deferral, math, logging, switchprotocol
25 from . import networks, web, work
26 import p2pool, p2pool.data as p2pool_data, p2pool.node as p2pool_node
28 @defer.inlineCallbacks
29 def main(args, net, datadir_path, merged_urls, worker_endpoint):
31 print 'p2pool (version %s)' % (p2pool.__version__,)
34 @defer.inlineCallbacks
36 # connect to bitcoind over bitcoin-p2p
37 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
38 factory = bitcoin_p2p.ClientFactory(net.PARENT)
39 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
40 yield factory.getProtocol() # waits until handshake is successful
43 defer.returnValue(factory)
45 if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
46 factory = yield connect_p2p()
48 # connect to bitcoind over JSON-RPC and do initial getmemorypool
49 url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
50 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
51 bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
52 yield helper.check(bitcoind, net)
53 temp_work = yield helper.getwork(bitcoind)
55 bitcoind_warning_var = variable.Variable(None)
56 @defer.inlineCallbacks
58 errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
59 bitcoind_warning_var.set(errors if errors != '' else None)
61 task.LoopingCall(poll_warnings).start(20*60)
64 print ' Current block hash: %x' % (temp_work['previous_block'],)
65 print ' Current block height: %i' % (temp_work['height'] - 1,)
69 factory = yield connect_p2p()
71 print 'Determining payout address...'
72 if args.pubkey_hash is None:
73 address_path = os.path.join(datadir_path, 'cached_payout_address')
75 if os.path.exists(address_path):
76 with open(address_path, 'rb') as f:
77 address = f.read().strip('\r\n')
78 print ' Loaded cached address: %s...' % (address,)
82 if address is not None:
83 res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
84 if not res['isvalid'] or not res['ismine']:
85 print ' Cached address is either invalid or not controlled by local bitcoind!'
89 print ' Getting payout address from bitcoind...'
90 address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
92 with open(address_path, 'wb') as f:
95 my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
97 my_pubkey_hash = args.pubkey_hash
98 print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
101 ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
103 known_verified = set()
104 print "Loading shares..."
105 for i, (mode, contents) in enumerate(ss.get_shares()):
107 contents.time_seen = 0
108 shares[contents.hash] = contents
109 if len(shares) % 1000 == 0 and shares:
110 print " %i" % (len(shares),)
111 elif mode == 'verified_hash':
112 known_verified.add(contents)
114 raise AssertionError()
115 print " ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
119 print 'Initializing work...'
121 node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net)
124 for share_hash in shares:
125 if share_hash not in node.tracker.items:
126 ss.forget_share(share_hash)
127 for share_hash in known_verified:
128 if share_hash not in node.tracker.verified.items:
129 ss.forget_verified_share(share_hash)
130 del shares, known_verified
131 node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
132 node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
135 for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)):
137 if share.hash in node.tracker.verified.items:
138 ss.add_verified_hash(share.hash)
139 task.LoopingCall(save_shares).start(60)
145 print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
147 @defer.inlineCallbacks
151 host, port_str = host.split(':')
153 defer.returnValue(((yield reactor.resolve(host)), port))
156 if os.path.exists(os.path.join(datadir_path, 'addrs')):
158 with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
159 addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
161 print >>sys.stderr, 'error parsing addrs'
162 for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
165 if addr not in addrs:
166 addrs[addr] = (0, time.time(), time.time())
170 connect_addrs = set()
171 for addr_df in map(parse, args.p2pool_nodes):
173 connect_addrs.add((yield addr_df))
177 node.p2p_node = p2pool_node.P2PNode(node,
178 port=args.p2pool_port,
179 max_incoming_conns=args.p2pool_conns,
181 connect_addrs=connect_addrs,
182 desired_outgoing_conns=args.p2pool_outgoing_conns,
184 node.p2p_node.start()
187 with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
188 f.write(json.dumps(node.p2p_node.addr_store.items()))
189 task.LoopingCall(save_addrs).start(60)
195 @defer.inlineCallbacks
199 is_lan, lan_ip = yield ipdiscover.get_local_ip()
201 pm = yield portmapper.get_port_mapper()
202 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
203 except defer.TimeoutError:
207 log.err(None, 'UPnP error:')
208 yield deferral.sleep(random.expovariate(1/120))
211 # start listening for workers with a JSON-RPC server
213 print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
215 wb = work.WorkerBridge(node, my_pubkey_hash, args.donation_percentage, merged_urls, args.worker_fee)
216 web_root = web.get_web_root(wb, datadir_path, bitcoind_warning_var)
217 caching_wb = worker_interface.CachingWorkerBridge(wb)
218 worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
219 web_serverfactory = server.Site(web_root)
222 serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
223 deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0])
225 with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
233 print 'Started successfully!'
234 print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
235 if args.donation_percentage > 1.1:
236 print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
237 elif args.donation_percentage < .9:
238 print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
240 print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
241 print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
245 if hasattr(signal, 'SIGALRM'):
246 signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
247 sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
249 signal.siginterrupt(signal.SIGALRM, False)
250 task.LoopingCall(signal.alarm, 30).start(1)
252 if args.irc_announce:
253 from twisted.words.protocols import irc
254 class IRCClient(irc.IRCClient):
255 nickname = 'p2pool%02i' % (random.randrange(100),)
256 channel = net.ANNOUNCE_CHANNEL
257 def lineReceived(self, line):
260 irc.IRCClient.lineReceived(self, line)
262 self.in_channel = False
263 irc.IRCClient.signedOn(self)
264 self.factory.resetDelay()
265 self.join(self.channel)
266 @defer.inlineCallbacks
267 def new_share(share):
268 if not self.in_channel:
270 if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
271 yield deferral.sleep(random.expovariate(1/60))
272 message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
273 if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
274 self.say(self.channel, message)
275 self._remember_message(message)
276 self.watch_id = node.tracker.verified.added.watch(new_share)
277 self.recent_messages = []
def joined(self, channel):
    # Twisted callback: fires once our JOIN of `channel` has succeeded.
    # new_share() checks in_channel before announcing, so announcements
    # only happen while we are actually present in the channel.
    self.in_channel = True
def left(self, channel):
    # Twisted callback, mirror of joined(): once we have left the channel,
    # suppress further block announcements.
    self.in_channel = False
def _remember_message(self, message):
    """Record a channel message, retaining only the 100 most recent.

    The retained window is compared against outgoing block announcements
    so the same block is not announced twice.
    """
    self.recent_messages.append(message)
    if len(self.recent_messages) > 100:
        # Trim in place (slice assignment) so any other reference to the
        # list keeps seeing the same object; keeps the newest 100 entries,
        # exactly like the former pop-from-front loop.
        self.recent_messages[:] = self.recent_messages[-100:]
def privmsg(self, user, channel, message):
    """Track traffic in our channel so duplicate announcements are skipped."""
    # Ignore private messages and traffic from other channels.
    if channel != self.channel:
        return
    self._remember_message(message)
def connectionLost(self, reason):
    # Detach the verified-share watcher (registered as self.watch_id when we
    # signed on) so shares found while disconnected don't try to announce
    # over a dead connection; the reconnecting factory will build a fresh
    # client with its own watcher.
    node.tracker.verified.added.unwatch(self.watch_id)
    print 'IRC connection lost:', reason.getErrorMessage()
292 class IRCClientFactory(protocol.ReconnectingClientFactory):
294 reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
296 @defer.inlineCallbacks
301 yield deferral.sleep(3)
303 height = node.tracker.get_height(node.best_share_var.value)
304 this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
306 len(node.tracker.verified.items),
307 len(node.tracker.items),
308 len(node.p2p_node.peers),
309 sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
310 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
312 datums, dt = wb.local_rate_monitor.get_datums_in_last()
313 my_att_s = sum(datum['work']/dt for datum in datums)
314 this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
315 math.format(int(my_att_s)),
317 math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
318 math.format_dt(2**256 / node.tracker.items[node.best_share_var.value].max_target / my_att_s) if my_att_s and node.best_share_var.value else '???',
322 (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
323 stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
324 real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
326 this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
327 shares, stale_orphan_shares, stale_doa_shares,
328 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
329 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
330 node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
332 this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
333 math.format(int(real_att_s)),
335 math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s),
338 for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_warning_var.value, node.bitcoind_work.value):
339 print >>sys.stderr, '#'*40
340 print >>sys.stderr, '>>> Warning: ' + warning
341 print >>sys.stderr, '#'*40
344 print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))
346 if this_str != last_str or time.time() > last_time + 15:
349 last_time = time.time()
355 log.err(None, 'Fatal error:')
358 if not hasattr(tcp.Client, 'abortConnection'):
359 print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
360 print 'Pausing for 3 seconds...'
363 realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
365 parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
366 parser.add_argument('--version', action='version', version=p2pool.__version__)
367 parser.add_argument('--net',
368 help='use specified network (default: bitcoin)',
369 action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
370 parser.add_argument('--testnet',
371 help='''use the network's testnet''',
372 action='store_const', const=True, default=False, dest='testnet')
373 parser.add_argument('--debug',
374 help='enable debugging mode',
375 action='store_const', const=True, default=False, dest='debug')
376 parser.add_argument('-a', '--address',
377 help='generate payouts to this address (default: <address requested from bitcoind>)',
378 type=str, action='store', default=None, dest='address')
379 parser.add_argument('--datadir',
380 help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
381 type=str, action='store', default=None, dest='datadir')
382 parser.add_argument('--logfile',
383 help='''log to this file (default: data/<NET>/log)''',
384 type=str, action='store', default=None, dest='logfile')
385 parser.add_argument('--merged',
386 help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
387 type=str, action='append', default=[], dest='merged_urls')
388 parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
389 help='donate this percentage of work towards the development of p2pool (default: 1.0)',
390 type=float, action='store', default=1.0, dest='donation_percentage')
391 parser.add_argument('--iocp',
392 help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
393 action='store_true', default=False, dest='iocp')
394 parser.add_argument('--irc-announce',
395 help='announce any blocks found on irc://irc.freenode.net/#p2pool',
396 action='store_true', default=False, dest='irc_announce')
397 parser.add_argument('--no-bugreport',
398 help='disable submitting caught exceptions to the author',
399 action='store_true', default=False, dest='no_bugreport')
401 p2pool_group = parser.add_argument_group('p2pool interface')
402 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
403 help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
404 type=int, action='store', default=None, dest='p2pool_port')
405 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
406 help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
407 type=str, action='append', default=[], dest='p2pool_nodes')
408 parser.add_argument('--disable-upnp',
409 help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
410 action='store_false', default=True, dest='upnp')
411 p2pool_group.add_argument('--max-conns', metavar='CONNS',
412 help='maximum incoming connections (default: 40)',
413 type=int, action='store', default=40, dest='p2pool_conns')
414 p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
415 help='outgoing connections (default: 6)',
416 type=int, action='store', default=6, dest='p2pool_outgoing_conns')
418 worker_group = parser.add_argument_group('worker interface')
419 worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
420 help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
421 type=str, action='store', default=None, dest='worker_endpoint')
422 worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
423 help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
424 type=float, action='store', default=0, dest='worker_fee')
426 bitcoind_group = parser.add_argument_group('bitcoind interface')
427 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
428 help='connect to this address (default: 127.0.0.1)',
429 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
430 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
431 help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
432 type=int, action='store', default=None, dest='bitcoind_rpc_port')
433 bitcoind_group.add_argument('--bitcoind-rpc-ssl',
434 help='connect to JSON-RPC interface using SSL',
435 action='store_true', default=False, dest='bitcoind_rpc_ssl')
436 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
437 help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
438 type=int, action='store', default=None, dest='bitcoind_p2p_port')
440 bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
441 help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
442 type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
444 args = parser.parse_args()
448 defer.setDebugging(True)
452 net_name = args.net_name + ('_testnet' if args.testnet else '')
453 net = networks.nets[net_name]
455 datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
456 if not os.path.exists(datadir_path):
457 os.makedirs(datadir_path)
459 if len(args.bitcoind_rpc_userpass) > 2:
460 parser.error('a maximum of two arguments are allowed')
461 args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
463 if args.bitcoind_rpc_password is None:
464 conf_path = net.PARENT.CONF_FILE_FUNC()
465 if not os.path.exists(conf_path):
466 parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
467 '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
470 '''rpcpassword=%x\r\n'''
472 '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
473 conf = open(conf_path, 'rb').read()
475 for line in conf.splitlines(True):
477 line = line[:line.index('#')]
480 k, v = line.split('=', 1)
481 contents[k.strip()] = v.strip()
482 for conf_name, var_name, var_type in [
483 ('rpcuser', 'bitcoind_rpc_username', str),
484 ('rpcpassword', 'bitcoind_rpc_password', str),
485 ('rpcport', 'bitcoind_rpc_port', int),
486 ('port', 'bitcoind_p2p_port', int),
488 if getattr(args, var_name) is None and conf_name in contents:
489 setattr(args, var_name, var_type(contents[conf_name]))
490 if args.bitcoind_rpc_password is None:
491 parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
493 if args.bitcoind_rpc_username is None:
494 args.bitcoind_rpc_username = ''
496 if args.bitcoind_rpc_port is None:
497 args.bitcoind_rpc_port = net.PARENT.RPC_PORT
499 if args.bitcoind_p2p_port is None:
500 args.bitcoind_p2p_port = net.PARENT.P2P_PORT
502 if args.p2pool_port is None:
503 args.p2pool_port = net.P2P_PORT
505 if args.p2pool_outgoing_conns > 10:
506 parser.error('''--outgoing-conns can't be more than 10''')
508 if args.worker_endpoint is None:
509 worker_endpoint = '', net.WORKER_PORT
510 elif ':' not in args.worker_endpoint:
511 worker_endpoint = '', int(args.worker_endpoint)
513 addr, port = args.worker_endpoint.rsplit(':', 1)
514 worker_endpoint = addr, int(port)
516 if args.address is not None:
518 args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
520 parser.error('error parsing address: ' + repr(e))
522 args.pubkey_hash = None
def separate_url(url):
    """Split credentials out of a merged-mining URL.

    Returns (url_without_credentials, 'user:pass'). Aborts argument
    parsing via parser.error() if the URL's netloc carries no '@'.
    """
    split = urlparse.urlsplit(url)
    if '@' not in split.netloc:
        parser.error('merged url netloc must contain an "@"')
    # rsplit so passwords containing '@' are kept intact.
    userpass, bare_netloc = split.netloc.rsplit('@', 1)
    return urlparse.urlunsplit(split._replace(netloc=bare_netloc)), userpass
530 merged_urls = map(separate_url, args.merged_urls)
532 if args.logfile is None:
533 args.logfile = os.path.join(datadir_path, 'log')
535 logfile = logging.LogFile(args.logfile)
536 pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
537 sys.stdout = logging.AbortPipe(pipe)
538 sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
539 if hasattr(signal, "SIGUSR1"):
540 def sigusr1(signum, frame):
541 print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
543 print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
544 signal.signal(signal.SIGUSR1, sigusr1)
545 task.LoopingCall(logfile.reopen).start(5)
547 class ErrorReporter(object):
549 self.last_sent = None
551 def emit(self, eventDict):
552 if not eventDict["isError"]:
555 if self.last_sent is not None and time.time() < self.last_sent + 5:
557 self.last_sent = time.time()
559 if 'failure' in eventDict:
560 text = ((eventDict.get('why') or 'Unhandled Error')
561 + '\n' + eventDict['failure'].getTraceback())
563 text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
565 from twisted.web import client
567 url='http://u.forre.st/p2pool_error.cgi',
569 postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
571 ).addBoth(lambda x: None)
572 if not args.no_bugreport:
573 log.addObserver(ErrorReporter().emit)
575 reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)