1 from __future__ import division
16 from twisted.internet import defer, error, reactor, protocol, task
17 from twisted.web import server, resource
18 from twisted.python import log
19 from nattraverso import portmapper, ipdiscover
21 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
22 from bitcoin import worker_interface
23 from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
24 from . import p2p, networks, graphs
25 import p2pool, p2pool.data as p2pool_data
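# getwork() below wraps bitcoind's getmemorypool RPC call and normalizes its
# result into the fields p2pool works with (previous block hash, decoded
# transactions, merkle branch, subsidy, bits, coinbase flags), retrying on
# transient errors via deferral.retry.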
27 @deferral.retry('Error getting work from bitcoind:', 3)
28 @defer.inlineCallbacks
29 def getwork(bitcoind):
31 work = yield bitcoind.rpc_getmemorypool()
32 except jsonrpc.Error, e:
33 if e.code == -32601: # Method not found
34 print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
35 raise deferral.RetrySilentlyException()
37 packed_transactions = [x.decode('hex') for x in work['transactions']]
38 defer.returnValue(dict(
39 version=work['version'],
40 previous_block_hash=int(work['previousblockhash'], 16),
41 transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
42 merkle_branch=bitcoin_data.calculate_merkle_branch([0] + map(bitcoin_data.hash256, packed_transactions), 0),
43 subsidy=work['coinbasevalue'],
45 bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
46 coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
49 @defer.inlineCallbacks
50 def main(args, net, datadir_path, merged_urls, worker_endpoint):
52 print 'p2pool (version %s)' % (p2pool.__version__,)
58 print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
61 # connect to bitcoind over JSON-RPC and do initial getmemorypool
62 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
63 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
64 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
65 good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
67 print >>sys.stderr, " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
69 temp_work = yield getwork(bitcoind)
71 print ' Current block hash: %x' % (temp_work['previous_block_hash'],)
74 # connect to bitcoind over bitcoin-p2p
75 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
76 factory = bitcoin_p2p.ClientFactory(net.PARENT)
77 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
78 yield factory.getProtocol() # waits until handshake is successful
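# With both the RPC and P2P connections to bitcoind verified, work out where
# this node's own share of block rewards should be paid.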
82 print 'Determining payout script...'
83 if args.pubkey_hash is None:
84 address_path = os.path.join(datadir_path, 'cached_payout_address')
86 if os.path.exists(address_path):
87 with open(address_path, 'rb') as f:
88 address = f.read().strip('\r\n')
89 print ' Loaded cached address: %s...' % (address,)
93 if address is not None:
94 res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
95 if not res['isvalid'] or not res['ismine']:
96 print ' Cached address is either invalid or not controlled by local bitcoind!'
100 print ' Getting payout address from bitcoind...'
101 address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
103 with open(address_path, 'wb') as f:
106 my_script = bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net.PARENT))
108 print ' ...Computing payout script from provided address...'
109 my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
110 print ' ...success! Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
113 my_share_hashes = set()
114 my_doa_share_hashes = set()
116 tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
117 shared_share_hashes = set()
118 ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
119 known_verified = set()
121 print "Loading shares..."
122 for i, (mode, contents) in enumerate(ss.get_shares()):
124 if contents.hash in tracker.shares:
126 shared_share_hashes.add(contents.hash)
127 contents.time_seen = 0
128 tracker.add(contents)
129 if len(tracker.shares) % 1000 == 0 and tracker.shares:
130 print " %i" % (len(tracker.shares),)
131 elif mode == 'verified_hash':
132 known_verified.add(contents)
134 raise AssertionError()
135 print " ...inserting %i verified shares..." % (len(known_verified),)
136 for h in known_verified:
137 if h not in tracker.shares:
138 ss.forget_verified_share(h)
140 tracker.verified.add(tracker.shares[h])
141 print " ...done loading %i shares!" % (len(tracker.shares),)
143 tracker.removed.watch(lambda share: ss.forget_share(share.hash))
144 tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
145 tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
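# Keep the on-disk ShareStore in sync with the in-memory tracker: shares the
# tracker prunes are forgotten on disk, and their hashes are also dropped from
# the set of shares already announced to peers.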
147 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
149 pre_current_work = variable.Variable(None)
150 pre_merged_work = variable.Variable({})
151 # information affecting work that should trigger a long-polling update
152 current_work = variable.Variable(None)
153 # information affecting work that should not trigger a long-polling update
154 current_work2 = variable.Variable(None)
156 requested = expiring_dict.ExpiringDict(300)
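# 'requested' maps share_hash -> (last_request_time, request_count), so parent
# shares aren't re-requested from peers more often than the backoff allows.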
158 print 'Initializing work...'
159 @defer.inlineCallbacks
160 def set_real_work1():
161 work = yield getwork(bitcoind)
162 current_work2.set(dict(
164 transactions=work['transactions'],
165 merkle_branch=work['merkle_branch'],
166 subsidy=work['subsidy'],
167 clock_offset=time.time() - work['time'],
168 last_update=time.time(),
169 )) # set current_work2 before pre_current_work: watchers hook the latter and expect current_work2 to already be fresh
170 pre_current_work.set(dict(
171 version=work['version'],
172 previous_block=work['previous_block_hash'],
174 coinbaseflags=work['coinbaseflags'],
176 yield set_real_work1()
178 if '\ngetblock ' in (yield deferral.retry()(bitcoind.rpc_help)()):
179 height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
180 best_height_cached = variable.Variable((yield deferral.retry()(height_cacher)(pre_current_work.value['previous_block'])))
181 def get_height_rel_highest(block_hash):
182 this_height = height_cacher.call_now(block_hash, 0)
183 best_height = height_cacher.call_now(pre_current_work.value['previous_block'], 0)
184 best_height_cached.set(max(best_height_cached.value, this_height, best_height))
185 return this_height - best_height_cached.value
187 get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory).get_height_rel_highest
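# Either way, get_height_rel_highest(block_hash) returns the height of
# block_hash relative to the best block seen so far, which tracker.think()
# below takes as input.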
189 def set_real_work2():
190 best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
192 t = dict(pre_current_work.value)
193 t['best_share_hash'] = best
194 t['mm_chains'] = pre_merged_work.value
198 for peer2, share_hash in desired:
199 if share_hash not in tracker.tails: # was received in the time tracker.think was running
201 last_request_time, count = requested.get(share_hash, (None, 0))
202 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
204 potential_peers = set()
205 for head in tracker.tails[share_hash]:
206 potential_peers.update(peer_heads.get(head, set()))
207 potential_peers = [peer for peer in potential_peers if peer.connected2]
208 if count == 0 and peer2 is not None and peer2.connected2:
211 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
215 print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
219 stops=list(set(tracker.heads) | set(
220 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
223 requested[share_hash] = t, count + 1
224 pre_current_work.changed.watch(lambda _: set_real_work2())
225 pre_merged_work.changed.watch(lambda _: set_real_work2())
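# Merged mining: poll each --merged URL's getauxblock about once a second and
# fold the latest aux work into pre_merged_work, keyed by chain id.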
231 @defer.inlineCallbacks
232 def set_merged_work(merged_url, merged_userpass):
233 merged_proxy = jsonrpc.Proxy(merged_url, (merged_userpass,))
235 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
236 pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
237 hash=int(auxblock['hash'], 16),
238 target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
239 merged_proxy=merged_proxy,
241 yield deferral.sleep(1)
242 for merged_url, merged_userpass in merged_urls:
243 set_merged_work(merged_url, merged_userpass)
245 @pre_merged_work.changed.watch
246 def _(new_merged_work):
247 print 'Got new merged mining work!'
249 # setup p2p logic and join p2pool network
251 class Node(p2p.Node):
252 def handle_shares(self, shares, peer):
254 print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
258 if share.hash in tracker.shares:
259 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
264 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
268 if shares and peer is not None:
269 peer_heads.setdefault(shares[0].hash, set()).add(peer)
275 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
277 def handle_share_hashes(self, hashes, peer):
280 for share_hash in hashes:
281 if share_hash in tracker.shares:
283 last_request_time, count = requested.get(share_hash, (None, 0))
284 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
286 print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
287 get_hashes.append(share_hash)
288 requested[share_hash] = t, count + 1
290 if hashes and peer is not None:
291 peer_heads.setdefault(hashes[0], set()).add(peer)
293 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
295 def handle_get_shares(self, hashes, parents, stops, peer):
296 parents = min(parents, 1000//len(hashes))
299 for share_hash in hashes:
300 for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
301 if share.hash in stops:
304 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
305 peer.sendShares(shares)
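# Any newly verified share whose proof-of-work also meets the block target is
# a complete Bitcoin block found by a peer; pass it straight to bitcoind.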
307 @tracker.verified.added.watch
309 if share.pow_hash <= share.header['bits'].target:
310 if factory.conn.value is not None:
311 factory.conn.value.send_block(block=share.as_block(tracker))
313 print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
315 print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash)
317 recent_blocks.append({ 'ts': share.timestamp, 'hash': '%x' % (share.header_hash) })
319 print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
321 @defer.inlineCallbacks
324 ip, port = x.split(':')
325 defer.returnValue(((yield reactor.resolve(ip)), int(port)))
327 defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
330 if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
332 addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
334 print >>sys.stderr, "error reading addrs"
335 for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
338 if addr not in addrs:
339 addrs[addr] = (0, time.time(), time.time())
343 connect_addrs = set()
344 for addr_df in map(parse, args.p2pool_nodes):
346 connect_addrs.add((yield addr_df))
351 best_share_hash_func=lambda: current_work.value['best_share_hash'],
352 port=args.p2pool_port,
355 connect_addrs=connect_addrs,
360 open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
361 task.LoopingCall(save_addrs).start(60)
363 # when the best share chain changes, forward any shares on the new chain that peers haven't been sent yet
364 def work_changed(new_work):
365 #print 'Work changed:', new_work
367 for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
368 if share.hash in shared_share_hashes:
370 shared_share_hashes.add(share.hash)
373 for peer in p2p_node.peers.itervalues():
374 peer.sendShares([share for share in shares if share.peer is not peer])
376 current_work.changed.watch(work_changed)
379 for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
381 if share.hash in tracker.verified.shares:
382 ss.add_verified_hash(share.hash)
383 task.LoopingCall(save_shares).start(60)
388 start_time = time.time()
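# Optionally keep a UPnP mapping for the p2pool port alive; the mapping is
# re-added in a loop (average interval ~2 minutes), since discovery and
# mapping calls can fail transiently.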
390 @defer.inlineCallbacks
394 is_lan, lan_ip = yield ipdiscover.get_local_ip()
396 pm = yield portmapper.get_port_mapper()
397 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
398 except defer.TimeoutError:
402 log.err(None, "UPnP error:")
403 yield deferral.sleep(random.expovariate(1/120))
408 # start listening for workers with a JSON-RPC server
410 print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
412 if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
413 with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
414 vip_pass = f.read().strip('\r\n')
416 vip_pass = '%016x' % (random.randrange(2**64),)
417 with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
419 print ' Worker password:', vip_pass, '(only required for generating graphs)'
423 removed_unstales_var = variable.Variable((0, 0, 0))
424 @tracker.verified.removed.watch
426 if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
427 assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
428 removed_unstales_var.set((
429 removed_unstales_var.value[0] + 1,
430 removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
431 removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
434 removed_doa_unstales_var = variable.Variable(0)
435 @tracker.verified.removed.watch
437 if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
438 removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
440 def get_stale_counts():
441 '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
442 my_shares = len(my_share_hashes)
443 my_doa_shares = len(my_doa_share_hashes)
444 delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
445 my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
446 my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
447 orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
448 doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
450 my_shares_not_in_chain = my_shares - my_shares_in_chain
451 my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
453 return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
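# local_rate_monitor accumulates (work, dead, user) datums for the last ten
# minutes; it backs the per-miner hash rate figures shown in the status line
# and in the /local_stats web resource.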
456 local_rate_monitor = math.RateMonitor(10*60)
458 class WorkerBridge(worker_interface.WorkerBridge):
460 worker_interface.WorkerBridge.__init__(self)
461 self.new_work_event = current_work.changed
462 self.recent_shares_ts_work = []
464 def _get_payout_script_from_username(self, user):
468 pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
471 return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
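# preprocess_request implements the -f/--fee option: a miner that
# authenticates with a valid bitcoin address is paid directly, except that a
# random args.worker_fee percent of its requests are credited to this node's
# own payout script instead.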
473 def preprocess_request(self, request):
474 payout_script = self._get_payout_script_from_username(request.getUser())
475 if payout_script is None or random.uniform(0, 100) < args.worker_fee:
476 payout_script = my_script
477 return payout_script,
479 def get_work(self, payout_script):
480 if len(p2p_node.peers) == 0 and net.PERSIST:
481 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
482 if current_work.value['best_share_hash'] is None and net.PERSIST:
483 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
484 if time.time() > current_work2.value['last_update'] + 60:
485 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
487 if current_work.value['mm_chains']:
488 tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
489 mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
490 mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
491 merkle_root=bitcoin_data.merkle_hash(mm_hashes),
495 mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
500 share_info, generate_tx = p2pool_data.generate_transaction(
503 previous_share_hash=current_work.value['best_share_hash'],
504 coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
505 nonce=struct.pack('<Q', random.randrange(2**64)),
506 new_script=payout_script,
507 subsidy=current_work2.value['subsidy'],
508 donation=math.perfect_round(65535*args.donation_percentage/100),
509 stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
510 253 if orphans > orphans_recorded_in_chain else
511 254 if doas > doas_recorded_in_chain else
513 )(*get_stale_counts()),
515 block_target=current_work.value['bits'].target,
516 desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
520 target = 2**256//2**32 - 1
521 if len(self.recent_shares_ts_work) == 50:
522 hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
523 target = min(target, 2**256//(hash_rate * 5))
524 target = max(target, share_info['bits'].target)
525 for aux_work in current_work.value['mm_chains'].itervalues():
526 target = max(target, aux_work['target'])
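# The pseudoshare target computed above starts at difficulty 1, is tightened
# so this miner returns roughly one proof of work every ~5 seconds based on
# its recent hash rate, and is then relaxed so it is never stricter than the
# real share target or any merged chain's target.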
528 transactions = [generate_tx] + list(current_work2.value['transactions'])
529 merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(generate_tx)), 0, current_work2.value['merkle_branch'])
531 getwork_time = time.time()
532 merkle_branch = current_work2.value['merkle_branch']
534 print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
535 bitcoin_data.target_to_difficulty(target),
536 bitcoin_data.target_to_difficulty(share_info['bits'].target),
537 current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
538 len(current_work2.value['transactions']),
541 ba = bitcoin_getwork.BlockAttempt(
542 version=current_work.value['version'],
543 previous_block=current_work.value['previous_block'],
544 merkle_root=merkle_root,
545 timestamp=current_work2.value['time'],
546 bits=current_work.value['bits'],
550 def got_response(header, request):
551 assert header['merkle_root'] == merkle_root
553 header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
554 pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
555 on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
558 if pow_hash <= header['bits'].target or p2pool.DEBUG:
559 @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
561 if factory.conn.value is None:
562 print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Hash: %x' % (header_hash,)
563 raise deferral.RetrySilentlyException()
564 factory.conn.value.send_block(block=dict(header=header, txs=transactions))
566 if pow_hash <= header['bits'].target:
568 print 'GOT BLOCK FROM MINER! Passing to bitcoind! bitcoin: %x' % (header_hash,)
570 recent_blocks.append(dict(ts=time.time(), hash='%x' % (header_hash,)))
572 log.err(None, 'Error while processing potential block:')
574 for aux_work, index, hashes in mm_later:
576 if pow_hash <= aux_work['target'] or p2pool.DEBUG:
577 df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
578 pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
579 bitcoin_data.aux_pow_type.pack(dict(
582 block_hash=header_hash,
583 merkle_branch=merkle_branch,
586 merkle_branch=bitcoin_data.calculate_merkle_branch(hashes, index),
588 parent_block_header=header,
593 if result != (pow_hash <= aux_work['target']):
594 print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
596 print 'Merged block submittal result: %s' % (result,)
599 log.err(err, 'Error submitting merged block:')
601 log.err(None, 'Error while processing merged mining POW:')
603 if pow_hash <= share_info['bits'].target:
604 share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
605 print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
607 p2pool_data.format_hash(share.hash),
608 p2pool_data.format_hash(share.previous_hash),
609 time.time() - getwork_time,
610 ' DEAD ON ARRIVAL' if not on_time else '',
612 my_share_hashes.add(share.hash)
614 my_doa_share_hashes.add(share.hash)
618 tracker.verified.add(share)
622 if pow_hash <= header['bits'].target or p2pool.DEBUG:
623 for peer in p2p_node.peers.itervalues():
624 peer.sendShares([share])
625 shared_share_hashes.add(share.hash)
627 log.err(None, 'Error forwarding block solution:')
629 if pow_hash <= target:
630 reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
631 if request.getPassword() == vip_pass:
632 reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
633 self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
634 while len(self.recent_shares_ts_work) > 50:
635 self.recent_shares_ts_work.pop(0)
636 local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
639 if pow_hash > target:
640 print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
641 print ' Hash: %56x' % (pow_hash,)
642 print ' Target: %56x' % (target,)
646 return ba, got_response
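# Everything below is the HTTP side: WorkerInterface serves work to miners via
# the WorkerBridge above, and the WebInterface resources expose pool and local
# statistics as JSON/text endpoints.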
648 web_root = resource.Resource()
649 worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
652 if tracker.get_height(current_work.value['best_share_hash']) < 720:
653 return json.dumps(None)
654 return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
655 / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
658 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
659 weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
661 for script in sorted(weights, key=lambda s: weights[s]):
662 res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
663 return json.dumps(res)
665 def get_current_txouts():
666 share = tracker.shares[current_work.value['best_share_hash']]
667 share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
668 return dict((out['script'], out['value']) for out in gentx['tx_outs'])
670 def get_current_scaled_txouts(scale, trunc=0):
671 txouts = get_current_txouts()
672 total = sum(txouts.itervalues())
673 results = dict((script, value*scale//total) for script, value in txouts.iteritems())
677 for s in sorted(results, key=results.__getitem__):
678 if results[s] >= trunc:
680 total_random += results[s]
683 winner = math.weighted_choice((script, results[script]) for script in random_set)
684 for script in random_set:
686 results[winner] = total_random
687 if sum(results.itervalues()) < int(scale):
688 results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
691 def get_current_payouts():
692 return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
694 def get_patron_sendmany(this):
697 this, trunc = this.split('/', 1)
700 return json.dumps(dict(
701 (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
702 for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
703 if bitcoin_data.script2_to_address(script, net.PARENT) is not None
706 return json.dumps(None)
708 def get_global_stats():
709 # averaged over last hour
710 lookbehind = 3600//net.SHARE_PERIOD
711 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
714 nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
715 stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
716 return json.dumps(dict(
717 pool_nonstale_hash_rate=nonstale_hash_rate,
718 pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
719 pool_stale_prop=stale_prop,
722 def get_local_stats():
723 lookbehind = 3600//net.SHARE_PERIOD
724 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
727 global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
729 my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
730 my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
731 my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
732 my_share_count = my_unstale_count + my_orphan_count + my_doa_count
733 my_stale_count = my_orphan_count + my_doa_count
735 my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
737 my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
738 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
739 if share.hash in my_share_hashes)
740 actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
741 tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
742 share_att_s = my_work / actual_time
744 miner_hash_rates = {}
745 miner_dead_hash_rates = {}
746 datums, dt = local_rate_monitor.get_datums_in_last()
748 miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
750 miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
752 return json.dumps(dict(
753 my_hash_rates_in_last_hour=dict(
755 nonstale=share_att_s,
756 rewarded=share_att_s/(1 - global_stale_prop),
757 actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
759 my_share_counts_in_last_hour=dict(
760 shares=my_share_count,
761 unstale_shares=my_unstale_count,
762 stale_shares=my_stale_count,
763 orphan_stale_shares=my_orphan_count,
764 doa_stale_shares=my_doa_count,
766 my_stale_proportions_in_last_hour=dict(
768 orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
769 dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
771 miner_hash_rates=miner_hash_rates,
772 miner_dead_hash_rates=miner_dead_hash_rates,
775 def get_peer_addresses():
776 return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
779 return json.dumps(time.time() - start_time)
781 class WebInterface(resource.Resource):
782 def __init__(self, func, mime_type, *fields):
783 self.func, self.mime_type, self.fields = func, mime_type, fields
785 def render_GET(self, request):
786 request.setHeader('Content-Type', self.mime_type)
787 request.setHeader('Access-Control-Allow-Origin', '*')
788 return self.func(*(request.args[field][0] for field in self.fields))
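# Illustrative request (hypothetical values): GET /patron_sendmany?total=1
# calls get_patron_sendmany('1') and returns a sendmany-style mapping of
# payout addresses to amounts scaled to 1 BTC.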
790 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
791 web_root.putChild('users', WebInterface(get_users, 'application/json'))
792 web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
793 web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
794 web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
795 web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
796 web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
797 web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
798 web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
799 web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
800 web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
802 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
804 new_root = resource.Resource()
805 web_root.putChild('web', new_root)
808 if os.path.exists(os.path.join(datadir_path, 'stats')):
810 with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
811 stat_log = json.loads(f.read())
813 log.err(None, 'Error loading stats:')
814 def update_stat_log():
815 while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
818 lookbehind = 3600//net.SHARE_PERIOD
819 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
822 global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
823 (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
825 miner_hash_rates = {}
826 miner_dead_hash_rates = {}
827 datums, dt = local_rate_monitor.get_datums_in_last()
829 miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
831 miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
833 stat_log.append(dict(
835 pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
836 pool_stale_prop=global_stale_prop,
837 local_hash_rates=miner_hash_rates,
838 local_dead_hash_rates=miner_dead_hash_rates,
840 stale_shares=stale_orphan_shares + stale_doa_shares,
841 stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
842 current_payout=get_current_txouts().get(my_script, 0)*1e-8,
845 with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
846 f.write(json.dumps(stat_log))
847 task.LoopingCall(update_stat_log).start(5*60)
848 new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
850 grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
851 web_root.putChild('graphs', grapher.get_resource())
853 if tracker.get_height(current_work.value['best_share_hash']) < 720:
855 nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
856 poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
857 grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
858 task.LoopingCall(add_point).start(100)
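# Bind the worker/web port; if the port is still in use, log the error and
# retry once a second rather than aborting startup.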
860 def attempt_listen():
862 reactor.listenTCP(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
863 except error.CannotListenError, e:
864 print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
865 reactor.callLater(1, attempt_listen)
867 with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
875 @defer.inlineCallbacks
878 flag = factory.new_block.get_deferred()
880 yield set_real_work1()
883 yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
888 print 'Started successfully!'
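# Watchdog: re-arm signal.alarm(30) every second so that, if the reactor ever
# stalls for 30 seconds, the SIGALRM handler dumps a stack trace to stderr.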
892 if hasattr(signal, 'SIGALRM'):
893 signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
894 sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
896 signal.siginterrupt(signal.SIGALRM, False)
897 task.LoopingCall(signal.alarm, 30).start(1)
899 if args.irc_announce:
900 from twisted.words.protocols import irc
901 class IRCClient(irc.IRCClient):
903 def lineReceived(self, line):
905 irc.IRCClient.lineReceived(self, line)
907 irc.IRCClient.signedOn(self)
908 self.factory.resetDelay()
910 self.watch_id = tracker.verified.added.watch(self._new_share)
911 self.announced_hashes = set()
912 def _new_share(self, share):
913 if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes and abs(share.timestamp - time.time()) < 10*60:
914 self.announced_hashes.add(share.header_hash)
915 self.say('#p2pool', '\x02BLOCK FOUND by %s! http://blockexplorer.com/block/%064x' % (bitcoin_data.script2_to_address(share.share_data['new_script'], net.PARENT), share.header_hash))
916 def connectionLost(self, reason):
917 tracker.verified.added.unwatch(self.watch_id)
918 print 'IRC connection lost:', reason.getErrorMessage()
919 class IRCClientFactory(protocol.ReconnectingClientFactory):
921 reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
923 @defer.inlineCallbacks
928 yield deferral.sleep(3)
930 if time.time() > current_work2.value['last_update'] + 60:
931 print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - current_work2.value['last_update']),)
933 height = tracker.get_height(current_work.value['best_share_hash'])
934 this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
936 len(tracker.verified.shares),
939 sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
940 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
942 datums, dt = local_rate_monitor.get_datums_in_last()
943 my_att_s = sum(datum['work']/dt for datum in datums)
944 this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
945 math.format(int(my_att_s)),
947 math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
948 math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].target / my_att_s) if my_att_s else '???',
952 (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
953 stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
954 real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
956 this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
957 shares, stale_orphan_shares, stale_doa_shares,
958 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
959 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
960 get_current_txouts().get(my_script, 0)*1e-8, net.PARENT.SYMBOL,
962 this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
963 math.format(int(real_att_s)),
965 math.format_dt(2**256 / current_work.value['bits'].target / real_att_s),
968 if this_str != last_str or time.time() > last_time + 15:
971 last_time = time.time()
976 log.err(None, 'Fatal error:')
980 class FixedArgumentParser(argparse.ArgumentParser):
981 def _read_args_from_files(self, arg_strings):
982 # expand arguments referencing files
984 for arg_string in arg_strings:
986 # for regular arguments, just add them back into the list
987 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
988 new_arg_strings.append(arg_string)
990 # replace arguments referencing files with the file content
993 args_file = open(arg_string[1:])
996 for arg_line in args_file.read().splitlines():
997 for arg in self.convert_arg_line_to_args(arg_line):
998 arg_strings.append(arg)
999 arg_strings = self._read_args_from_files(arg_strings)
1000 new_arg_strings.extend(arg_strings)
1004 err = sys.exc_info()[1]
1005 self.error(str(err))
1007 # return the modified argument list
1008 return new_arg_strings
1010 def convert_arg_line_to_args(self, arg_line):
1011 return [arg for arg in arg_line.split() if arg.strip()]
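# e.g. (hypothetical file name) passing '@p2pool.conf' on the command line
# replaces that argument with the whitespace-separated tokens of each line of
# p2pool.conf, with nested '@file' references expanded recursively.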
1014 realnets=dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
1016 parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
1017 parser.add_argument('--version', action='version', version=p2pool.__version__)
1018 parser.add_argument('--net',
1019 help='use specified network (default: bitcoin)',
1020 action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
1021 parser.add_argument('--testnet',
1022 help='''use the network's testnet''',
1023 action='store_const', const=True, default=False, dest='testnet')
1024 parser.add_argument('--debug',
1025 help='enable debugging mode',
1026 action='store_const', const=True, default=False, dest='debug')
1027 parser.add_argument('-a', '--address',
1028 help='generate payouts to this address (default: <address requested from bitcoind>)',
1029 type=str, action='store', default=None, dest='address')
1030 parser.add_argument('--datadir',
1031 help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
1032 type=str, action='store', default=None, dest='datadir')
1033 parser.add_argument('--logfile',
1034 help='''log to this file (default: data/<NET>/log)''',
1035 type=str, action='store', default=None, dest='logfile')
1036 parser.add_argument('--merged',
1037 help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
1038 type=str, action='append', default=[], dest='merged_urls')
1039 parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
1040 help='donate this percentage of work to author of p2pool (default: 0.5)',
1041 type=float, action='store', default=0.5, dest='donation_percentage')
1042 parser.add_argument('--irc-announce',
1043 help='announce any blocks found on irc://irc.freenode.net/#p2pool',
1044 action='store_true', default=False, dest='irc_announce')
1046 p2pool_group = parser.add_argument_group('p2pool interface')
1047 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
1048 help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
1049 type=int, action='store', default=None, dest='p2pool_port')
1050 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
1051 help='connect to existing p2pool node at ADDR listening on port PORT (PORT defaults to the p2pool P2P port for the network) in addition to the built-in bootstrap addresses',
1052 type=str, action='append', default=[], dest='p2pool_nodes')
1053 parser.add_argument('--disable-upnp',
1054 help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
1055 action='store_false', default=True, dest='upnp')
1057 worker_group = parser.add_argument_group('worker interface')
1058 worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
1059 help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
1060 type=str, action='store', default=None, dest='worker_endpoint')
1061 worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
1062 help='''charge workers who mine to their own bitcoin address (set as their miner's username) this percentage fee for mining on your p2pool instance. The current fee is displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
1063 type=float, action='store', default=0, dest='worker_fee')
1065 bitcoind_group = parser.add_argument_group('bitcoind interface')
1066 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
1067 help='connect to this address (default: 127.0.0.1)',
1068 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
1069 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
1070 help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
1071 type=int, action='store', default=None, dest='bitcoind_rpc_port')
1072 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
1073 help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
1074 type=int, action='store', default=None, dest='bitcoind_p2p_port')
1076 bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
1077 help='bitcoind RPC interface username, then password, space-separated (if only one is provided, the username defaults to empty; if neither is provided, P2Pool reads them from bitcoin.conf)',
1078 type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
1080 args = parser.parse_args()
1085 net_name = args.net_name + ('_testnet' if args.testnet else '')
1086 net = networks.nets[net_name]
1088 datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
1089 if not os.path.exists(datadir_path):
1090 os.makedirs(datadir_path)
1092 if len(args.bitcoind_rpc_userpass) > 2:
1093 parser.error('a maximum of two arguments is allowed')
1094 args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
1096 if args.bitcoind_rpc_password is None:
1097 if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
1098 parser.error('This network has no configuration file function. Manually enter your RPC password.')
1099 conf_path = net.PARENT.CONF_FILE_FUNC()
1100 if not os.path.exists(conf_path):
1101 parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
1102 '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
1105 '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
1106 with open(conf_path, 'rb') as f:
1107 cp = ConfigParser.RawConfigParser()
1108 cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
1109 for conf_name, var_name, var_type in [
1110 ('rpcuser', 'bitcoind_rpc_username', str),
1111 ('rpcpassword', 'bitcoind_rpc_password', str),
1112 ('rpcport', 'bitcoind_rpc_port', int),
1113 ('port', 'bitcoind_p2p_port', int),
1115 if getattr(args, var_name) is None and cp.has_option('x', conf_name):
1116 setattr(args, var_name, var_type(cp.get('x', conf_name)))
1118 if args.bitcoind_rpc_username is None:
1119 args.bitcoind_rpc_username = ''
1121 if args.bitcoind_rpc_port is None:
1122 args.bitcoind_rpc_port = net.PARENT.RPC_PORT
1124 if args.bitcoind_p2p_port is None:
1125 args.bitcoind_p2p_port = net.PARENT.P2P_PORT
1127 if args.p2pool_port is None:
1128 args.p2pool_port = net.P2P_PORT
1130 if args.worker_endpoint is None:
1131 worker_endpoint = '', net.WORKER_PORT
1132 elif ':' not in args.worker_endpoint:
1133 worker_endpoint = '', int(args.worker_endpoint)
1135 addr, port = args.worker_endpoint.rsplit(':', 1)
1136 worker_endpoint = addr, int(port)
1138 if args.address is not None:
1140 args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
1141 except Exception, e:
1142 parser.error('error parsing address: ' + repr(e))
1144 args.pubkey_hash = None
1146 def separate_url(url):
1147 s = urlparse.urlsplit(url)
1148 if '@' not in s.netloc:
1149 parser.error('merged url netloc must contain an "@"')
1150 userpass, new_netloc = s.netloc.rsplit('@', 1)
1151 return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
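# e.g. separate_url('http://ncuser:ncpass@127.0.0.1:10332/')
#      -> ('http://127.0.0.1:10332/', 'ncuser:ncpass')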
1152 merged_urls = map(separate_url, args.merged_urls)
1154 if args.logfile is None:
1155 args.logfile = os.path.join(datadir_path, 'log')
1157 logfile = logging.LogFile(args.logfile)
1158 pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
1159 sys.stdout = logging.AbortPipe(pipe)
1160 sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
1161 if hasattr(signal, "SIGUSR1"):
1162 def sigusr1(signum, frame):
1163 print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
1165 print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
1166 signal.signal(signal.SIGUSR1, sigusr1)
1167 task.LoopingCall(logfile.reopen).start(5)
1169 reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)