3 from __future__ import division
17 from twisted.internet import defer, reactor
18 from twisted.web import server, resource
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
22 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
23 from util import db, expiring_dict, jsonrpc, variable, deferral, math
24 from . import p2p, worker_interface, skiplists
25 import p2pool.data as p2pool
26 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current work unit and block height from bitcoind.

    Both RPCs are issued before either result is awaited so the two answers
    describe (nearly) the same chain state.  Returns, via defer.returnValue,
    a tuple of (bitcoin.getwork.BlockAttempt, height).
    """
    # a block could arrive in between these two queries
    work_deferred = bitcoind.rpc_getwork()
    height_deferred = bitcoind.rpc_getblocknumber()
    work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_deferred))
    height = yield height_deferred
    # Swallow any residual errors on the already-consumed deferreds so they
    # are not reported as unhandled.
    for df in (work_deferred, height_deferred):
        df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over the bitcoin-p2p 'checkorder' mechanism for a payout script.

    Returns (via defer.returnValue) the script on success, None when the
    IP-transaction request is denied, and raises ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: derive one from bitcoind's 'p2pool' account address."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
@defer.inlineCallbacks
    # --- main(args): startup sequence, run inside the Twisted reactor ---
    print 'p2pool (version %s)' % (p2pool_init.__version__,)

    # connect to bitcoind over JSON-RPC and do initial getwork
    url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
    print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
    bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
    temp_work, temp_height = yield getwork(bitcoind)
    print ' Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)

    # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
    print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
    factory = bitcoin.p2p.ClientFactory(args.net)
    reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
    my_script = yield get_payout_script(factory)
    if args.pubkey_hash is None:
        # checkorder denied -> ask bitcoind for an account address instead
        print ' IP transaction denied ... falling back to sending to address.'
        my_script = yield get_payout_script2(bitcoind, args.net)
        # NOTE(review): an explicit address/pubkey-hash was supplied on the
        # command line; presumably this belongs in an else: branch of the
        # pubkey_hash check above - confirm against the full source.
        my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
    print ' Payout script:', my_script.encode('hex')

    print 'Loading cached block headers...'
    ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
    print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)

    # OkayTracker holds the p2pool share chain(s)
    tracker = p2pool.OkayTracker(args.net)
    chains = expiring_dict.ExpiringDict(300)
    def get_chain(chain_id_data):
        # memoize Chain objects per chain id; entries expire after 300s
        return chains.setdefault(chain_id_data, Chain(chain_id_data))

    peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it

    # information affecting work that should trigger a long-polling update
    current_work = variable.Variable(None)
    # information affecting work that should not trigger a long-polling update
    current_work2 = variable.Variable(None)

    work_updated = variable.Event()

    # share_hash -> (last request time, request count), for backoff on re-requests
    requested = expiring_dict.ExpiringDict(300)
    @defer.inlineCallbacks
    def set_real_work1():
        # Poll bitcoind for fresh work and publish it.  current_work carries
        # the fields whose change triggers a long-poll; current_work2 the rest.
        work, height = yield getwork(bitcoind)
        # True on first run or whenever bitcoind's chain tip has moved
        changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
        current_work.set(dict(
            version=work.version,
            previous_block=work.previous_block,
            # carry the old best share forward; set_real_work2 recomputes it
            best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
        current_work2.set(dict(
            # offset of the local clock relative to bitcoind's work timestamp
            clock_offset=time.time() - work.timestamp,
    def set_real_work2():
        # Re-evaluate which p2pool share chain is best and request any
        # desired-but-missing ancestor shares from peers.
        best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])

        t = dict(current_work.value)
        t['best_share_hash'] = best

        for peer2, share_hash in desired:
            if share_hash not in tracker.tails: # was received in the time tracker.think was running
            last_request_time, count = requested.get(share_hash, (None, 0))
            # backoff: skip hashes requested recently (exponential per retry)
            # NOTE(review): 't' here looks like it should be a timestamp
            # (time.time()), not the work dict bound above - confirm.
            if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
            potential_peers = set()
            # candidates: peers known to hold any head descending from this tail
            for head in tracker.tails[share_hash]:
                potential_peers.update(peer_heads.get(head, set()))
            potential_peers = [peer for peer in potential_peers if peer.connected2]
            if count == 0 and peer2 is not None and peer2.connected2:
            # usually pick a random candidate, sometimes the announcing peer
            peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2

            print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
            # stop hashes: current heads plus a point shortly behind each head
                stops=list(set(tracker.heads) | set(
                    tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
            requested[share_hash] = t, count + 1

    print 'Initializing work...'
    yield set_real_work1()

    # wall-clock start, corrected to bitcoind's clock
    start_time = time.time() - current_work2.value['clock_offset']
    # setup p2p logic and join p2pool network

    def share_share(share, ignore_peer=None):
        # Broadcast a share to every connected peer except ignore_peer
        # (typically the peer the share arrived from).
        for peer in p2p_node.peers.itervalues():
            if peer is ignore_peer:
            #if p2pool_init.DEBUG:
            #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
            peer.send_shares([share])
    def p2p_shares(shares, peer=None):
        # Handle a batch of shares received from the network (peer set) or
        # produced locally (peer None): add them to the tracker and forward
        # any that are also full bitcoin blocks to bitcoind.
        print 'Processing %i shares...' % (len(shares),)

            if share.hash in tracker.shares:
                #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)

            #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)

            #for peer2, share_hash in desired:
            #    print 'Requesting parent share %x' % (share_hash,)
            #    peer2.send_getshares(hashes=[share_hash], parents=2000)

            # a share that also meets the bitcoin target is a full block
            if share.bitcoin_hash <= share.header['target']:
                print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                # NOTE(review): presumably the else-branch of the conn check
                print 'No bitcoind connection! Erp!'

        # remember that this peer knows about the head of this batch
        if shares and peer is not None:
            peer_heads.setdefault(shares[0].hash, set()).add(peer)

        print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
    def p2p_share_hashes(share_hashes, peer):
        # Handle a peer advertising share hashes: request the ones we don't
        # already have, with per-hash exponential backoff on re-requests.
        for share_hash in share_hashes:
            if share_hash in tracker.shares:
            last_request_time, count = requested.get(share_hash, (None, 0))
            # skip hashes requested recently (backoff grows with retry count)
            if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
            print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
            get_hashes.append(share_hash)
            requested[share_hash] = t, count + 1

        if share_hashes and peer is not None:
            peer_heads.setdefault(share_hashes[0], set()).add(peer)
        peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
235 def p2p_get_shares(share_hashes, parents, stops, peer):
236 parents = min(parents, 1000//len(share_hashes))
239 for share_hash in share_hashes:
240 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
241 if share.hash in stops:
244 peer.send_shares(shares, full=True)
    print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)

    # parse 'addr[:port]' strings from --p2pool-node; bare addresses get the
    # network's default p2p port
        ip, port = x.split(':')
        return x, args.net.P2P_PORT

    # built-in bootstrap peers (IP literals plus hostnames resolved below)
        ('72.14.191.28', args.net.P2P_PORT),
        ('62.204.197.159', args.net.P2P_PORT),
        ('142.58.248.28', args.net.P2P_PORT),
        ('94.23.34.145', args.net.P2P_PORT),
        'dabuttonfactory.com',
        # resolution failures are logged and the host skipped
        nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
        log.err(None, 'Error resolving bootstrap node IP:')

    # p2p node construction (keyword arguments)
        current_work=current_work,
        port=args.p2pool_port,
        # persistent peer-address store kept next to the executable
        addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
        mode=0 if args.low_bandwidth else 1,
        preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
    # wire network callbacks to the handlers defined above
    p2p_node.handle_shares = p2p_shares
    p2p_node.handle_share_hashes = p2p_share_hashes
    p2p_node.handle_get_shares = p2p_get_shares

    # send share when the chain changes to their chain
    def work_changed(new_work):
        #print 'Work changed:', new_work
        for share in tracker.get_chain_known(new_work['best_share_hash']):
            share_share(share, share.peer)
    current_work.changed.watch(work_changed)
    @defer.inlineCallbacks
        # UPnP maintenance loop body: discover the LAN IP and (re)map the
        # p2p port on the router, then sleep a randomized interval and retry.
        is_lan, lan_ip = yield ipdiscover.get_local_ip()
        pm = yield portmapper.get_port_mapper()
        yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
        if p2pool_init.DEBUG:
            log.err(None, "UPnP error:")
        yield deferral.sleep(random.expovariate(1/120))

    # start listening for workers with a JSON-RPC server

    print 'Listening for workers on port %i...' % (args.worker_port,)

    # merkle root of a handed-out work unit -> its full transaction list,
    # so a submitted header can be reassembled into a block
    merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
    # random per-run tag embedded in generated coinbase nonces so shares we
    # mined this run can be recognized later
    run_identifier = struct.pack('<Q', random.randrange(2**64))
    def compute(state, payout_script):
        # Build a getwork BlockAttempt for a miner from the current p2pool
        # state: assemble a coinbase (generate) transaction plus mempool
        # transactions, and record the work so got_response can match it up.
        if payout_script is None:
            payout_script = my_script
        if state['best_share_hash'] is None and args.net.PERSIST:
            raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
        # candidate transactions from the memory pool, capped by merkle limit
        pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
        pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
        # greedily fill the block up to ~500kB
        for tx in pre_extra_txs:
            this_size = len(bitcoin.data.tx_type.pack(tx.tx))
            if size + this_size > 500000:
        # XXX assuming generate_tx is smallish here..
        generate_tx = p2pool.generate_transaction(
            previous_share_hash=state['best_share_hash'],
            new_script=payout_script,
            # block reward (halving every 210000 blocks) plus collected fees
            subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
            nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
            block_target=state['target'],
        print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
        #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
        #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
        transactions = [generate_tx] + [tx.tx for tx in extra_txs]
        merkle_root = bitcoin.data.merkle_hash(transactions)
        merkle_root_to_transactions[merkle_root] = transactions # entries expire per the ExpiringDict(300) above

        # never let the timestamp fall below one past the median of the last
        # 11 shares' timestamps (median-time-past style rule)
        timestamp = int(time.time() - current_work2.value['clock_offset'])
        if state['best_share_hash'] is not None:
            timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
            if timestamp2 > timestamp:
                print 'Toff', timestamp2 - timestamp
                timestamp = timestamp2
        # share-level target lives inside the coinbase script's share_data
        target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
        # remember when this nonce was handed out, for share-age reporting
        times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
        #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
        return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
    def got_response(data):
        # Handle a block header submitted by a miner: pass qualifying blocks
        # to bitcoind and feed valid shares into the p2pool share chain.
        # match up with transactions
        header = bitcoin.getwork.decode_data(data)
        transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
        if transactions is None:
            print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
        block = dict(header=header, txs=transactions)
        hash_ = bitcoin.data.block_header_type.hash256(block['header'])
        # submit to bitcoind whenever the bitcoin target is met (always in DEBUG)
        if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
            if factory.conn.value is not None:
                factory.conn.value.send_block(block=block)
            # NOTE(review): presumably the else-branch of the conn check
            print 'No bitcoind connection! Erp!'
            if hash_ <= block['header']['target']:
                print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
        # share-level difficulty check against the target embedded in the coinbase
        target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
        print 'Received invalid share from worker - %x/%x' % (hash_, target)
        share = p2pool.Share.from_block(block)
        my_shares.add(share.hash)
        print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
        good = share.previous_hash == current_work.value['best_share_hash']
        # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
        # eg. good = share.hash == current_work.value['best_share_hash'] here
        log.err(None, 'Error processing data received from worker:')
    web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)

    # JSON endpoint body: current pool rate in attempts/second (or null when
    # no share chain exists yet)
        if current_work.value['best_share_hash'] is not None:
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
            return json.dumps(att_s)
        return json.dumps(None)

    # JSON endpoint body: fraction of total weight per payout address over
    # the last (up to) 720 shares
        height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
        for script in sorted(weights, key=lambda s: weights[s]):
            res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
        return json.dumps(res)
    class WebInterface(resource.Resource):
        # Minimal twisted.web resource: serves func()'s result under a fixed
        # MIME type for the simple JSON/image endpoints below.
        def __init__(self, func, mime_type):
            self.func, self.mime_type = func, mime_type

        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)

    web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
    web_root.putChild('users', WebInterface(get_users, 'application/json'))
    # share-chain visualization (draw module; enabled with --charts)
    web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))

    reactor.listenTCP(args.worker_port, server.Site(web_root))
    # transaction memory pool: tx hash -> Tx wrapper
    tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
    # cached bitcoind getrawtransaction lookups, keyed by tx hash
    get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))

        def __init__(self, tx, seen_at_block):
            # Wrapper around a mempool transaction that tracks whether all of
            # its inputs' parent transactions are already in blocks.
            self.hash = bitcoin.data.tx_type.hash256(tx)
            self.seen_at_block = seen_at_block
            # this tx's hash plus every previous-output hash it references
            self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])

            #print '%x %r' % (seen_at_block, tx)
            #for mention in self.mentions:
            #    print '%x' % mention

            self.parents_all_in_blocks = False
            self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
            self._find_parents_in_blocks()

        @defer.inlineCallbacks
        def _find_parents_in_blocks(self):
            # Resolve each input's previous output via bitcoind, accumulating
            # value_in; mark parents_all_in_blocks once every parent is known
            # to be confirmed.
            for tx_in in self.tx['tx_ins']:
                raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                #print raw_transaction
                if not raw_transaction['parent_blocks']:
            self.parents_all_in_blocks = True

            if not self.parents_all_in_blocks:

    @defer.inlineCallbacks
        # new-tx handler body: fetch the announced transaction over p2p and
        # wrap it into the pool; errors are logged and swallowed
        assert isinstance(tx_hash, (int, long))
        #print 'REQUESTING', tx_hash
        tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
        tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
        log.err(None, 'Error handling tx:')
    # disable for now, for testing impact on stales
    #factory.new_tx.watch(new_tx)

    def new_block(block_hash):
        # any new bitcoin block means the current work is stale
        work_updated.happened()
    factory.new_block.watch(new_block)
    print 'Started successfully!'

    # recompute the best share whenever new block headers arrive
    ht.updated.watch(set_real_work2)

    @defer.inlineCallbacks
        # work-poller loop body: re-poll bitcoind when poked by work_updated
        # or after a randomized delay, whichever comes first
        flag = work_updated.get_deferred()
        yield set_real_work1()
        yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)

    @defer.inlineCallbacks
        # secondary poller loop body (randomized interval)
        yield deferral.sleep(random.expovariate(1/20))

    # counts shares in the chain carrying this run's nonce tag
    counter = skiplists.CountsSkipList(tracker, run_identifier)

        # status loop body: print a one-line pool/miner summary every few seconds
        yield deferral.sleep(3)
        if current_work.value['best_share_hash'] is not None:
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
            # split our shares into confirmed-in-chain vs. stale
            matching_in_chain = counter(current_work.value['best_share_hash'], height)
            shares_in_chain = my_shares & matching_in_chain
            stale_shares = my_shares - matching_in_chain
            print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                weights.get(my_script, 0)/total_weight*100,
                math.format(weights.get(my_script, 0)/total_weight*att_s),
                len(shares_in_chain) + len(stale_shares),
            ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
            #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
            #for k, v in weights.iteritems():
            #    print k.encode('hex'), v/total_weight

    log.err(None, 'Fatal error:')
    # command-line interface; parsed before the reactor starts
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    # --testnet swaps the whole network-parameters object, not just ports
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')

    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')

    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')

    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    # None default is resolved from args.net after parsing
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')

    # positional credentials (no flag names)
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')

    args = parser.parse_args()
        p2pool_init.DEBUG = True
    class ReopeningFile(object):
        # File-like object whose underlying file can be closed and reopened
        # in place (lets SIGUSR1 rotate debug.log without restarting).
        def __init__(self, *open_args, **open_kwargs):
            self.open_args, self.open_kwargs = open_args, open_kwargs
            self.inner_file = open(*self.open_args, **self.open_kwargs)
            # NOTE(review): the close/reopen pair presumably lives in a
            # separate rotate method - confirm against the full source
            self.inner_file.close()
            self.inner_file = open(*self.open_args, **self.open_kwargs)
        def write(self, data):
            self.inner_file.write(data)
            self.inner_file.flush()
    class TeePipe(object):
        # File-like object that duplicates writes to every output in a list.
        def __init__(self, outputs):
            self.outputs = outputs
        def write(self, data):
            for output in self.outputs:
            for output in self.outputs:
    class TimestampingPipe(object):
        # File-like object that prefixes each completed line with a
        # HH:MM:SS.microseconds timestamp before passing it on.
        def __init__(self, inner_file):
            self.inner_file = inner_file
        def write(self, data):
            # buffer partial lines; only timestamped, complete lines are written
            buf = self.buf + data
            lines = buf.split('\n')
            for line in lines[:-1]:
                self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                self.inner_file.flush()
    # mirror everything printed (and Twisted log output) into debug.log
    logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            # rotate debug.log when the process receives SIGUSR1
            print '''Caught SIGUSR1, closing 'debug.log'...'''
            print '''...and reopened 'debug.log' after catching SIGUSR1.'''
        signal.signal(signal.SIGUSR1, sigusr1)
    # fill in port defaults that depend on the selected network (main/testnet)
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT

    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT

    if args.address is not None:
        # resolve --address into a pubkey hash up front so bad input fails
        # before the reactor starts
        args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        # NOTE(review): this raise presumably sits in an except handler
        # around the conversion above - confirm against the full source
        raise ValueError('error parsing address: ' + repr(e))
    args.pubkey_hash = None

    reactor.callWhenRunning(main, args)