1 from __future__ import division
14 if '--iocp' in sys.argv:
15 from twisted.internet import iocpreactor
17 from twisted.internet import defer, reactor, protocol, task
18 from twisted.web import server
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
22 import bitcoin.p2p as bitcoin_p2p, bitcoin.data as bitcoin_data
23 from bitcoin import stratum, worker_interface, helper
24 from util import fixargparse, jsonrpc, variable, deferral, math, logging, switchprotocol
25 from . import networks, web, work
26 import p2pool, p2pool.data as p2pool_data, p2pool.node as p2pool_node
@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    """Bring up a full p2pool node.

    Connects to bitcoind (P2P + JSON-RPC), determines the payout address,
    loads the saved share chain, joins the p2pool P2P network, serves miners
    over a shared HTTP/stratum port, and starts the optional IRC announcer
    plus a periodic status printer.

    args            -- parsed command-line namespace (see argparse setup below)
    net             -- p2pool network definition (entry of networks.nets)
    datadir_path    -- per-network data directory for caches/shares/addrs
    merged_urls     -- (url, 'user:pass') pairs for merged mining
    worker_endpoint -- (interface, port) the worker/web server listens on

    NOTE(review): this excerpt elides many original lines (several nested
    'def'/'try'/'else' headers are missing); indentation below is a
    best-effort reconstruction and elisions are marked in comments.
    """
    print 'p2pool (version %s)' % (p2pool.__version__,)

    @defer.inlineCallbacks
    # body of the elided 'def connect_p2p():' helper follows (called below)
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        defer.returnValue(factory)

    if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
        factory = yield connect_p2p()

    # connect to bitcoind over JSON-RPC and do initial getmemorypool
    url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
    print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
    # HTTP basic auth; every RPC call gets a 30 s timeout
    bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
    yield helper.check(bitcoind, net)
    temp_work = yield helper.getwork(bitcoind)

    # poll getmininginfo every 20 minutes and expose bitcoind's warnings
    bitcoind_warning_var = variable.Variable(None)
    @defer.inlineCallbacks
    # body of the elided 'def poll_warnings():' helper follows (started below)
        errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
        bitcoind_warning_var.set(errors if errors != '' else None)
    task.LoopingCall(poll_warnings).start(20*60)

    print ' Current block hash: %x' % (temp_work['previous_block'],)
    print ' Current block height: %i' % (temp_work['height'] - 1,)

    factory = yield connect_p2p()  # NOTE(review): presumably the non-testnet path — its guard line is elided

    print 'Determining payout address...'
    if args.pubkey_hash is None:
        # no -a/--address given: use (and cache) an address from local bitcoind
        address_path = os.path.join(datadir_path, 'cached_payout_address')
        if os.path.exists(address_path):
            with open(address_path, 'rb') as f:
                address = f.read().strip('\r\n')
            print ' Loaded cached address: %s...' % (address,)

        if address is not None:
            # re-validate the cached address against bitcoind before using it
            res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
            if not res['isvalid'] or not res['ismine']:
                print ' Cached address is either invalid or not controlled by local bitcoind!'

        print ' Getting payout address from bitcoind...'
        address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()

        with open(address_path, 'wb') as f:
        # (the f.write(...) body of the 'with' above is elided in this excerpt)
        my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        my_pubkey_hash = args.pubkey_hash  # elided 'else:' branch — -a/--address was given
    print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)

    # load previously-seen shares from disk ('shares' dict init line is elided)
    ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
    known_verified = set()
    print "Loading shares..."
    for i, (mode, contents) in enumerate(ss.get_shares()):
            contents.time_seen = 0  # don't trust on-disk timestamps
            shares[contents.hash] = contents
            if len(shares) % 1000 == 0 and shares:
                print " %i" % (len(shares),)
        elif mode == 'verified_hash':
            known_verified.add(contents)
            raise AssertionError()  # unknown record type (elided 'else:' branch)
    print " ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))

    print 'Initializing work...'
    node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net)

    # drop on-disk shares the tracker rejected, then keep the store in sync
    # with the tracker's removal events from here on
    for share_hash in shares:
        if share_hash not in node.tracker.items:
            ss.forget_share(share_hash)
    for share_hash in known_verified:
        if share_hash not in node.tracker.verified.items:
            ss.forget_verified_share(share_hash)
    del shares, known_verified
    node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
    node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))

    # body of the elided 'def save_shares():' helper (started below): mark
    # verified shares of the best chain, up to 2*CHAIN_LENGTH back
        for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)):
            if share.hash in node.tracker.verified.items:
                ss.add_verified_hash(share.hash)
    task.LoopingCall(save_shares).start(60)

    print 'Joining p2pool network using port %i...' % (args.p2pool_port,)

    @defer.inlineCallbacks
    # body of the elided 'def parse(x):' helper: resolve 'ADDR[:PORT]' to (ip, port)
        ip, port = x.split(':')  # elided guard: only when ':' in x
        defer.returnValue(((yield reactor.resolve(ip)), int(port)))
        defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))  # elided 'else:' — default P2P port

    # seed the address store from the on-disk cache plus bootstrap nodes
    # ('addrs' dict init line is elided)
    if os.path.exists(os.path.join(datadir_path, 'addrs')):
            with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
            print >>sys.stderr, 'error parsing addrs'  # elided 'except:' — corrupt cache is non-fatal
    for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            if addr not in addrs:  # elided: addr = yield addr_df
                addrs[addr] = (0, time.time(), time.time())

    connect_addrs = set()
    for addr_df in map(parse, args.p2pool_nodes):
            connect_addrs.add((yield addr_df))

    node.p2p_node = p2pool_node.P2PNode(node,
        port=args.p2pool_port,
        max_incoming_conns=args.p2pool_conns,
        connect_addrs=connect_addrs,
        desired_outgoing_conns=args.p2pool_outgoing_conns,
    node.p2p_node.start()

    # body of the elided 'def save_addrs():' helper (started below): snapshot
    # the peer address store to disk every minute
        with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
            f.write(json.dumps(node.p2p_node.addr_store.items()))
    task.LoopingCall(save_addrs).start(60)

    @defer.inlineCallbacks
    # body of an elided UPnP helper: periodically try to forward the P2P port
    # on the local router; failures are non-fatal
        is_lan, lan_ip = yield ipdiscover.get_local_ip()
            pm = yield portmapper.get_port_mapper()  # elided guard: only when on a LAN
            yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
        except defer.TimeoutError:
            log.err(None, 'UPnP error:')  # elided: only logged when p2pool.DEBUG
        yield deferral.sleep(random.expovariate(1/120))  # ~120 s mean between attempts (true division via __future__)

    # start listening for workers with a JSON-RPC server

    print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])

    wb = work.WorkerBridge(node, my_pubkey_hash, args.donation_percentage, merged_urls, args.worker_fee)
    web_root = web.get_web_root(wb, datadir_path, bitcoind_warning_var)
    caching_wb = worker_interface.CachingWorkerBridge(wb)
    worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
    web_serverfactory = server.Site(web_root)

    # stratum requests start with '{'; everything else is treated as HTTP —
    # so one port serves both miners and the web UI
    serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
    deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0])

    # NOTE(review): the nested os.path.join is redundant — the inner call already yields the full path
    with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:

    print 'Started successfully!'
    print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
    if args.donation_percentage > 1.1:
        print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
    elif args.donation_percentage < .9:
        print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
        print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)  # elided 'else:' branch
    print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'

    # watchdog: dump a stack trace to stderr if SIGALRM fires (alarm re-armed
    # to 30 s every second below, so it only fires if the reactor stalls)
    if hasattr(signal, 'SIGALRM'):
        signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
            sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
        signal.siginterrupt(signal.SIGALRM, False)  # don't let SIGALRM interrupt syscalls
        task.LoopingCall(signal.alarm, 30).start(1)

    if args.irc_announce:
        from twisted.words.protocols import irc
        class IRCClient(irc.IRCClient):
            # random nick so multiple nodes can announce concurrently
            nickname = 'p2pool%02i' % (random.randrange(100),)
            channel = net.ANNOUNCE_CHANNEL
            def lineReceived(self, line):
                irc.IRCClient.lineReceived(self, line)
                # lines below belong to the elided 'def signedOn(self):' method
                self.in_channel = False
                irc.IRCClient.signedOn(self)
                self.factory.resetDelay()
                self.join(self.channel)
                @defer.inlineCallbacks
                def new_share(share):
                    # announce pool-found blocks, at most once per block hash
                    if not self.in_channel:
                    if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
                        yield deferral.sleep(random.expovariate(1/60))  # stagger announcements across nodes
                        message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                        if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
                            self.say(self.channel, message)
                            self._remember_message(message)
                self.watch_id = node.tracker.verified.added.watch(new_share)
                self.recent_messages = []
            def joined(self, channel):
                self.in_channel = True
            def left(self, channel):
                self.in_channel = False
            def _remember_message(self, message):
                self.recent_messages.append(message)
                while len(self.recent_messages) > 100:  # keep only the last 100 lines
                    self.recent_messages.pop(0)
            def privmsg(self, user, channel, message):
                # remember channel traffic to avoid duplicate announcements
                if channel == self.channel:
                    self._remember_message(message)
            def connectionLost(self, reason):
                node.tracker.verified.added.unwatch(self.watch_id)
                print 'IRC connection lost:', reason.getErrorMessage()
        class IRCClientFactory(protocol.ReconnectingClientFactory):
        reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())

    @defer.inlineCallbacks
    # body of an elided status-printer loop: every ~3 s build a status string
    # and print it when it changes (or at least every 15 s)
        yield deferral.sleep(3)
        height = node.tracker.get_height(node.best_share_var.value)
        this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
            len(node.tracker.verified.items),
            len(node.tracker.items),
            len(node.p2p_node.peers),
            sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')

        datums, dt = wb.local_rate_monitor.get_datums_in_last()
        my_att_s = sum(datum['work']/dt for datum in datums)  # local rate in attempts/s
        this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
            math.format(int(my_att_s)),
            math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
            math.format_dt(2**256 / node.tracker.items[node.best_share_var.value].max_target / my_att_s) if my_att_s and node.best_share_var.value else '???',

        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
        # scale the measured rate up by the stale fraction to estimate the real pool rate
        real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)

        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
            shares, stale_orphan_shares, stale_doa_shares,
            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
            node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
            math.format(int(real_att_s)),
            math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s),

        for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_warning_var.value, node.bitcoind_work.value):
            print >>sys.stderr, '#'*40
            print >>sys.stderr, '>>> Warning: ' + warning
            print >>sys.stderr, '#'*40

        print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))  # elided guard: only when gc.garbage is non-empty

        if this_str != last_str or time.time() > last_time + 15:
            last_time = time.time()
    log.err(None, 'Fatal error:')  # elided 'except:' handler of main's top-level try
# map of production networks: name -> network definition (testnets excluded,
# since each has a '<name>_testnet' sibling selected via --testnet)
realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)

# fromfile_prefix_chars='@' lets users keep options in an @file
parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
parser.add_argument('--version', action='version', version=p2pool.__version__)
parser.add_argument('--net',
    help='use specified network (default: bitcoin)',
    action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
parser.add_argument('--testnet',
    help='''use the network's testnet''',
    action='store_const', const=True, default=False, dest='testnet')
parser.add_argument('--debug',
    help='enable debugging mode',
    action='store_const', const=True, default=False, dest='debug')
parser.add_argument('-a', '--address',
    help='generate payouts to this address (default: <address requested from bitcoind>)',
    type=str, action='store', default=None, dest='address')
parser.add_argument('--datadir',
    help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
    type=str, action='store', default=None, dest='datadir')
parser.add_argument('--logfile',
    help='''log to this file (default: data/<NET>/log)''',
    type=str, action='store', default=None, dest='logfile')
parser.add_argument('--merged',
    help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
    type=str, action='append', default=[], dest='merged_urls')
parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
    help='donate this percentage of work towards the development of p2pool (default: 1.0)',
    type=float, action='store', default=1.0, dest='donation_percentage')
parser.add_argument('--iocp',
    help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
    action='store_true', default=False, dest='iocp')
parser.add_argument('--irc-announce',
    help='announce any blocks found on irc://irc.freenode.net/#p2pool',
    action='store_true', default=False, dest='irc_announce')
parser.add_argument('--no-bugreport',
    help='disable submitting caught exceptions to the author',
    action='store_true', default=False, dest='no_bugreport')

# options for p2pool's own node-to-node network
p2pool_group = parser.add_argument_group('p2pool interface')
p2pool_group.add_argument('--p2pool-port', metavar='PORT',
    help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
    type=int, action='store', default=None, dest='p2pool_port')
p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
    help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
    type=str, action='append', default=[], dest='p2pool_nodes')
# NOTE(review): added to 'parser' rather than 'p2pool_group' — it will appear
# under the general options in --help, unlike its neighbours
parser.add_argument('--disable-upnp',
    help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
    action='store_false', default=True, dest='upnp')
p2pool_group.add_argument('--max-conns', metavar='CONNS',
    help='maximum incoming connections (default: 40)',
    type=int, action='store', default=40, dest='p2pool_conns')
p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
    help='outgoing connections (default: 6)',
    type=int, action='store', default=6, dest='p2pool_outgoing_conns')

# options for the miner-facing interface
worker_group = parser.add_argument_group('worker interface')
worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
    help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
    type=str, action='store', default=None, dest='worker_endpoint')
worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
    help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
    type=float, action='store', default=0, dest='worker_fee')

# options for talking to the upstream bitcoind
bitcoind_group = parser.add_argument_group('bitcoind interface')
bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
    help='connect to this address (default: 127.0.0.1)',
    type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
    help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
    type=int, action='store', default=None, dest='bitcoind_rpc_port')
bitcoind_group.add_argument('--bitcoind-rpc-ssl',
    help='connect to JSON-RPC interface using SSL',
    action='store_true', default=False, dest='bitcoind_rpc_ssl')
bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
    help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
    type=int, action='store', default=None, dest='bitcoind_p2p_port')

# positional: zero to two values, username then password
bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
    help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
    type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
args = parser.parse_args()

defer.setDebugging(True)  # NOTE(review): guarded by an elided 'if args.debug:' in the full source — confirm

# select the network definition ('<net>' or '<net>_testnet')
net_name = args.net_name + ('_testnet' if args.testnet else '')
net = networks.nets[net_name]

# per-network data directory, defaulting to <script dir>/data/<net_name>
datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
if not os.path.exists(datadir_path):
    os.makedirs(datadir_path)

if len(args.bitcoind_rpc_userpass) > 2:
    parser.error('a maximum of two arguments are allowed')
# zero args -> (None, None); one -> (None, password); two -> (user, password)
args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]

if args.bitcoind_rpc_password is None:
    # no password given on the command line: read bitcoin.conf instead
    conf_path = net.PARENT.CONF_FILE_FUNC()
    if not os.path.exists(conf_path):
        # suggest a randomly-generated rpcpassword in the error message
        parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
            '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
            '''rpcpassword=%x\r\n'''
            '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
    conf = open(conf_path, 'rb').read()
    # crude key=value parser; the 'contents' dict init line is elided here
    for line in conf.splitlines(True):
        line = line[:line.index('#')]  # strip comments (elided guard: only when '#' in line)
        k, v = line.split('=', 1)
        contents[k.strip()] = v.strip()
    # fill in any still-unset args from bitcoin.conf values
    for conf_name, var_name, var_type in [
        ('rpcuser', 'bitcoind_rpc_username', str),
        ('rpcpassword', 'bitcoind_rpc_password', str),
        ('rpcport', 'bitcoind_rpc_port', int),
        ('port', 'bitcoind_p2p_port', int),
        if getattr(args, var_name) is None and conf_name in contents:
            setattr(args, var_name, var_type(contents[conf_name]))
    if args.bitcoind_rpc_password is None:
        parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')

if args.bitcoind_rpc_username is None:
    args.bitcoind_rpc_username = ''

# fall back to the network's default ports where nothing was specified
if args.bitcoind_rpc_port is None:
    args.bitcoind_rpc_port = net.PARENT.RPC_PORT

if args.bitcoind_p2p_port is None:
    args.bitcoind_p2p_port = net.PARENT.P2P_PORT

if args.p2pool_port is None:
    args.p2pool_port = net.P2P_PORT

if args.p2pool_outgoing_conns > 10:
    parser.error('''--outgoing-conns can't be more than 10''')

# normalize --worker-port into an (interface, port) pair
if args.worker_endpoint is None:
    worker_endpoint = '', net.WORKER_PORT
elif ':' not in args.worker_endpoint:
    worker_endpoint = '', int(args.worker_endpoint)
    # elided 'else:' branch — explicit ADDR:PORT form
    addr, port = args.worker_endpoint.rsplit(':', 1)
    worker_endpoint = addr, int(port)

if args.address is not None:
    # elided 'try:' — a malformed address is surfaced as an argument error
    args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
    parser.error('error parsing address: ' + repr(e))  # elided 'except' handler
    args.pubkey_hash = None  # elided 'else:' — payout address requested from bitcoind later
def separate_url(url):
    """Split a merged-mining URL into (url-without-credentials, 'user:pass').

    The netloc must look like userpass@host[:port]; otherwise the argument
    parser is asked to abort with an error message.
    """
    split_result = urlparse.urlsplit(url)
    netloc = split_result.netloc
    if '@' not in netloc:
        parser.error('merged url netloc must contain an "@"')
    # rsplit keeps any '@' inside the credential part intact
    credentials, bare_netloc = netloc.rsplit('@', 1)
    stripped_url = urlparse.urlunsplit(split_result._replace(netloc=bare_netloc))
    return stripped_url, credentials
merged_urls = map(separate_url, args.merged_urls)  # [(url, 'user:pass'), ...]

if args.logfile is None:
    args.logfile = os.path.join(datadir_path, 'log')

# tee all stdout/stderr through a timestamping pipe into the logfile;
# stderr lines get a '> ' prefix so they stand out in the combined log
logfile = logging.LogFile(args.logfile)
pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
sys.stdout = logging.AbortPipe(pipe)
sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
if hasattr(signal, "SIGUSR1"):
    # SIGUSR1 closes and reopens the logfile (supports external log rotation)
    def sigusr1(signum, frame):
        print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
        # (the logfile.reopen() call between these prints is elided in this excerpt)
        print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
    signal.signal(signal.SIGUSR1, sigusr1)
task.LoopingCall(logfile.reopen).start(5)  # also reopen periodically regardless of signals
class ErrorReporter(object):
    """Twisted log observer that POSTs error events to the author's server,
    rate-limited to at most one report per 5 seconds.

    NOTE(review): several lines ('def __init__', early 'return's, and the
    opening of the client.getPage(...) call) are elided in this excerpt.
    """
        self.last_sent = None  # body of the elided __init__: time of last report
    def emit(self, eventDict):
        # only error events are reported (elided 'return' follows this guard)
        if not eventDict["isError"]:
        # rate limit: drop reports within 5 s of the previous one
        if self.last_sent is not None and time.time() < self.last_sent + 5:
        self.last_sent = time.time()
        if 'failure' in eventDict:
            # full traceback when a Failure object is attached
            text = ((eventDict.get('why') or 'Unhandled Error')
                + '\n' + eventDict['failure'].getTraceback())
            text = " ".join([str(m) for m in eventDict["message"]]) + "\n"  # elided 'else:' branch
        from twisted.web import client
        # arguments of the elided client.getPage(...) call:
            url='http://u.forre.st/p2pool_error.cgi',
            postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
        ).addBoth(lambda x: None)  # fire-and-forget: ignore report success/failure
if not args.no_bugreport:
    # send uncaught errors to the author (opt out with --no-bugreport)
    log.addObserver(ErrorReporter().emit)

# hand off to Twisted: main() runs once the reactor is up
reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)