3 from __future__ import division
17 from twisted.internet import defer, reactor
18 from twisted.web import server, resource
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
22 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
23 from util import db, expiring_dict, jsonrpc, variable, deferral, math
24 from . import p2p, worker_interface, skiplists
25 import p2pool.data as p2pool
26 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch current work and block height from bitcoind over JSON-RPC.

    Fires rpc_getwork and rpc_getblocknumber concurrently and returns
    (BlockAttempt, height) via defer.returnValue. A block could arrive in
    between the two queries, so the pair is not guaranteed to be atomic.
    """
    # Kick off both RPCs before yielding so they run in parallel.
    work_deferred = bitcoind.rpc_getwork()
    height_deferred = bitcoind.rpc_getblocknumber()
    work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_deferred))
    height = yield height_deferred
    # Swallow residual errors on the deferreds so Twisted doesn't report
    # them as unhandled failures.
    work_deferred.addErrback(lambda fail: None)
    height_deferred.addErrback(lambda fail: None)
    defer.returnValue((work, height))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over the p2p interface for a payout script.

    Sends a null checkorder; returns the script on 'success', None when the
    IP transaction is 'denied', and raises ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    # defer.returnValue raises, so reaching this line means an unknown reply.
    raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build a payout script from bitcoind's 'p2pool' account address.

    Fetches the account address over RPC, decodes it into a pubkey hash for
    the given network, and returns the corresponding script2.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
57 @defer.inlineCallbacks
63 print 'p2pool (version %s)' % (p2pool_init.__version__,)
66 # connect to bitcoind over JSON-RPC and do initial getwork
67 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
68 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
69 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
70 temp_work, temp_height = yield getwork(bitcoind)
72 print ' Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
75 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
76 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
77 factory = bitcoin.p2p.ClientFactory(args.net)
78 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
79 my_script = yield get_payout_script(factory)
80 if args.pubkey_hash is None:
82 print ' IP transaction denied ... falling back to sending to address.'
83 my_script = yield get_payout_script2(bitcoind, args.net)
85 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
87 print ' Payout script:', my_script.encode('hex')
90 print 'Loading cached block headers...'
91 ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
92 print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
95 tracker = p2pool.OkayTracker(args.net)
96 chains = expiring_dict.ExpiringDict(300)
        def get_chain(chain_id_data):
            # Memoized lookup: reuse the cached Chain for this id while it is
            # still alive in the expiring dict, otherwise construct and cache
            # a new one.
            # NOTE(review): `Chain` is not defined anywhere in this view —
            # confirm it is defined/imported elsewhere in the file.
            return chains.setdefault(chain_id_data, Chain(chain_id_data))
100 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
102 # information affecting work that should trigger a long-polling update
103 current_work = variable.Variable(None)
104 # information affecting work that should not trigger a long-polling update
105 current_work2 = variable.Variable(None)
107 work_updated = variable.Event()
109 requested = expiring_dict.ExpiringDict(300)
111 @defer.inlineCallbacks
112 def set_real_work1():
113 work, height = yield getwork(bitcoind)
114 changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
115 current_work.set(dict(
116 version=work.version,
117 previous_block=work.previous_block,
120 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
122 current_work2.set(dict(
123 clock_offset=time.time() - work.timestamp,
128 def set_real_work2():
129 best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
131 t = dict(current_work.value)
132 t['best_share_hash'] = best
136 for peer2, share_hash in desired:
137 if share_hash not in tracker.tails: # was received in the time tracker.think was running
139 last_request_time, count = requested.get(share_hash, (None, 0))
140 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
142 potential_peers = set()
143 for head in tracker.tails[share_hash]:
144 potential_peers.update(peer_heads.get(head, set()))
145 potential_peers = [peer for peer in potential_peers if peer.connected2]
146 if count == 0 and peer2 is not None and peer2.connected2:
149 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
153 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
157 stops=list(set(tracker.heads) | set(
158 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
161 requested[share_hash] = t, count + 1
163 print 'Initializing work...'
164 yield set_real_work1()
169 start_time = time.time() - current_work2.value['clock_offset']
171 # setup p2p logic and join p2pool network
173 def share_share(share, ignore_peer=None):
174 for peer in p2p_node.peers.itervalues():
175 if peer is ignore_peer:
177 #if p2pool_init.DEBUG:
178 # print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
179 peer.send_shares([share])
182 def p2p_shares(shares, peer=None):
184 print 'Processing %i shares...' % (len(shares),)
188 if share.hash in tracker.shares:
189 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
193 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
196 #for peer2, share_hash in desired:
197 # print 'Requesting parent share %x' % (share_hash,)
198 # peer2.send_getshares(hashes=[share_hash], parents=2000)
200 if share.bitcoin_hash <= share.header['target']:
202 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
204 if factory.conn.value is not None:
205 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
207 print 'No bitcoind connection! Erp!'
209 if shares and peer is not None:
210 peer_heads.setdefault(shares[0].hash, set()).add(peer)
216 print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
218 def p2p_share_hashes(share_hashes, peer):
221 for share_hash in share_hashes:
222 if share_hash in tracker.shares:
224 last_request_time, count = requested.get(share_hash, (None, 0))
225 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
227 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
228 get_hashes.append(share_hash)
229 requested[share_hash] = t, count + 1
231 if share_hashes and peer is not None:
232 peer_heads.setdefault(share_hashes[0], set()).add(peer)
234 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
236 def p2p_get_shares(share_hashes, parents, stops, peer):
237 parents = min(parents, 1000//len(share_hashes))
240 for share_hash in share_hashes:
241 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
242 if share.hash in stops:
245 peer.send_shares(shares, full=True)
247 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
251 ip, port = x.split(':')
254 return x, args.net.P2P_PORT
257 ('72.14.191.28', args.net.P2P_PORT),
258 ('62.204.197.159', args.net.P2P_PORT),
259 ('142.58.248.28', args.net.P2P_PORT),
260 ('94.23.34.145', args.net.P2P_PORT),
264 'dabuttonfactory.com',
267 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
269 log.err(None, 'Error resolving bootstrap node IP:')
272 current_work=current_work,
273 port=args.p2pool_port,
275 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
276 mode=0 if args.low_bandwidth else 1,
277 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
279 p2p_node.handle_shares = p2p_shares
280 p2p_node.handle_share_hashes = p2p_share_hashes
281 p2p_node.handle_get_shares = p2p_get_shares
285 # send share when the chain changes to their chain
286 def work_changed(new_work):
287 #print 'Work changed:', new_work
288 for share in tracker.get_chain_known(new_work['best_share_hash']):
291 share_share(share, share.peer)
292 current_work.changed.watch(work_changed)
297 @defer.inlineCallbacks
301 is_lan, lan_ip = yield ipdiscover.get_local_ip()
304 pm = yield portmapper.get_port_mapper()
305 yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
307 if p2pool_init.DEBUG:
308 log.err(None, "UPnP error:")
309 yield deferral.sleep(random.expovariate(1/120))
314 # start listening for workers with a JSON-RPC server
316 print 'Listening for workers on port %i...' % (args.worker_port,)
320 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
321 run_identifier = struct.pack('<Q', random.randrange(2**64))
323 def compute(state, payout_script):
324 if payout_script is None:
325 payout_script = my_script
326 if state['best_share_hash'] is None and args.net.PERSIST:
327 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
328 pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
329 pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
332 for tx in pre_extra_txs:
333 this_size = len(bitcoin.data.tx_type.pack(tx.tx))
334 if size + this_size > 500000:
339 # XXX assuming generate_tx is smallish here..
340 generate_tx = p2pool.generate_transaction(
342 previous_share_hash=state['best_share_hash'],
343 new_script=payout_script,
344 subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
345 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
346 block_target=state['target'],
349 print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
350 #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
351 #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
352 transactions = [generate_tx] + [tx.tx for tx in extra_txs]
353 merkle_root = bitcoin.data.merkle_hash(transactions)
354 merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
356 timestamp = int(time.time() - current_work2.value['clock_offset'])
357 if state['best_share_hash'] is not None:
358 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
359 if timestamp2 > timestamp:
360 print 'Toff', timestamp2 - timestamp
361 timestamp = timestamp2
362 target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
363 times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
364 #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
365 return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
370 def got_response(data):
372 # match up with transactions
373 header = bitcoin.getwork.decode_data(data)
374 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
375 if transactions is None:
376 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
378 block = dict(header=header, txs=transactions)
379 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
380 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
381 if factory.conn.value is not None:
382 factory.conn.value.send_block(block=block)
384 print 'No bitcoind connection! Erp!'
385 if hash_ <= block['header']['target']:
387 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
389 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
391 print 'Received invalid share from worker - %x/%x' % (hash_, target)
393 share = p2pool.Share.from_block(block)
394 my_shares.add(share.hash)
395 print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
396 good = share.previous_hash == current_work.value['best_share_hash']
397 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
399 # eg. good = share.hash == current_work.value['best_share_hash'] here
402 log.err(None, 'Error processing data received from worker:')
405 web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
408 if current_work.value['best_share_hash'] is not None:
409 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
410 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
411 return json.dumps(att_s)
412 return json.dumps(None)
415 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
416 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
418 for script in sorted(weights, key=lambda s: weights[s]):
419 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
420 return json.dumps(res)
        class WebInterface(resource.Resource):
            # Minimal twisted.web resource: serves the result of a callable
            # with a fixed Content-Type on GET requests.
            def __init__(self, func, mime_type):
                # func: zero-argument callable producing the response body;
                # mime_type: value for the Content-Type header.
                self.func, self.mime_type = func, mime_type
426 def render_GET(self, request):
427 request.setHeader('Content-Type', self.mime_type)
430 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
431 web_root.putChild('users', WebInterface(get_users, 'application/json'))
433 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
435 reactor.listenTCP(args.worker_port, server.Site(web_root))
442 tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
443 get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
446 def __init__(self, tx, seen_at_block):
447 self.hash = bitcoin.data.tx_type.hash256(tx)
449 self.seen_at_block = seen_at_block
450 self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
452 #print '%x %r' % (seen_at_block, tx)
453 #for mention in self.mentions:
454 # print '%x' % mention
456 self.parents_all_in_blocks = False
459 self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
460 self._find_parents_in_blocks()
462 @defer.inlineCallbacks
463 def _find_parents_in_blocks(self):
464 for tx_in in self.tx['tx_ins']:
466 raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
469 self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
470 #print raw_transaction
471 if not raw_transaction['parent_blocks']:
473 self.parents_all_in_blocks = True
476 if not self.parents_all_in_blocks:
482 @defer.inlineCallbacks
485 assert isinstance(tx_hash, (int, long))
486 #print 'REQUESTING', tx_hash
487 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
489 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
491 log.err(None, 'Error handling tx:')
492 # disable for now, for testing impact on stales
493 #factory.new_tx.watch(new_tx)
        def new_block(block_hash):
            # A new bitcoin block was announced over p2p: fire the
            # work_updated event so the polling loop refreshes work promptly.
            work_updated.happened()
497 factory.new_block.watch(new_block)
499 print 'Started successfully!'
502 ht.updated.watch(set_real_work2)
504 @defer.inlineCallbacks
507 flag = work_updated.get_deferred()
509 yield set_real_work1()
512 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
514 @defer.inlineCallbacks
521 yield deferral.sleep(random.expovariate(1/20))
526 counter = skiplists.CountsSkipList(tracker, run_identifier)
529 yield deferral.sleep(3)
531 if current_work.value['best_share_hash'] is not None:
532 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
534 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
535 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
536 matching_in_chain = counter(current_work.value['best_share_hash'], height)
537 shares_in_chain = my_shares & matching_in_chain
538 stale_shares = my_shares - matching_in_chain
539 print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
542 weights.get(my_script, 0)/total_weight*100,
543 math.format(weights.get(my_script, 0)/total_weight*att_s),
544 len(shares_in_chain) + len(stale_shares),
547 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
548 #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
549 #for k, v in weights.iteritems():
550 # print k.encode('hex'), v/total_weight
554 log.err(None, 'Fatal error:')
558 parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
559 parser.add_argument('--version', action='version', version=p2pool_init.__version__)
560 parser.add_argument('--testnet',
561 help='use the testnet',
562 action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
563 parser.add_argument('--debug',
564 help='debugging mode',
565 action='store_const', const=True, default=False, dest='debug')
566 parser.add_argument('-a', '--address',
567 help='generate to this address (defaults to requesting one from bitcoind)',
568 type=str, action='store', default=None, dest='address')
569 parser.add_argument('--charts',
570 help='generate charts on the web interface (requires PIL and pygame)',
571 action='store_const', const=True, default=False, dest='charts')
573 p2pool_group = parser.add_argument_group('p2pool interface')
574 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
575 help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
576 type=int, action='store', default=None, dest='p2pool_port')
577 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
578 help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
579 type=str, action='append', default=[], dest='p2pool_nodes')
580 parser.add_argument('-l', '--low-bandwidth',
581 help='trade lower bandwidth usage for higher latency (reduced efficiency)',
582 action='store_true', default=False, dest='low_bandwidth')
583 parser.add_argument('--disable-upnp',
584 help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
585 action='store_false', default=True, dest='upnp')
587 worker_group = parser.add_argument_group('worker interface')
588 worker_group.add_argument('-w', '--worker-port', metavar='PORT',
589 help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
590 type=int, action='store', default=9332, dest='worker_port')
592 bitcoind_group = parser.add_argument_group('bitcoind interface')
593 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
594 help='connect to a bitcoind at this address (default: 127.0.0.1)',
595 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
596 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
597 help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
598 type=int, action='store', default=8332, dest='bitcoind_rpc_port')
599 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
600 help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
601 type=int, action='store', default=None, dest='bitcoind_p2p_port')
603 bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
604 help='bitcoind RPC interface username',
605 type=str, action='store', dest='bitcoind_rpc_username')
606 bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
607 help='bitcoind RPC interface password',
608 type=str, action='store', dest='bitcoind_rpc_password')
610 args = parser.parse_args()
613 p2pool_init.DEBUG = True
614 class ReopeningFile(object):
615 def __init__(self, *open_args, **open_kwargs):
616 self.open_args, self.open_kwargs = open_args, open_kwargs
617 self.inner_file = open(*self.open_args, **self.open_kwargs)
619 self.inner_file.close()
620 self.inner_file = open(*self.open_args, **self.open_kwargs)
621 def write(self, data):
622 self.inner_file.write(data)
624 self.inner_file.flush()
625 class TeePipe(object):
626 def __init__(self, outputs):
627 self.outputs = outputs
628 def write(self, data):
629 for output in self.outputs:
632 for output in self.outputs:
634 class TimestampingPipe(object):
635 def __init__(self, inner_file):
636 self.inner_file = inner_file
639 def write(self, data):
640 buf = self.buf + data
641 lines = buf.split('\n')
642 for line in lines[:-1]:
643 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
644 self.inner_file.flush()
648 logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
649 sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
650 if hasattr(signal, "SIGUSR1"):
651 def sigusr1(signum, frame):
652 print '''Caught SIGUSR1, closing 'debug.log'...'''
654 print '''...and reopened 'debug.log' after catching SIGUSR1.'''
655 signal.signal(signal.SIGUSR1, sigusr1)
657 if args.bitcoind_p2p_port is None:
658 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
660 if args.p2pool_port is None:
661 args.p2pool_port = args.net.P2P_PORT
663 if args.address is not None:
665 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
667 raise ValueError('error parsing address: ' + repr(e))
669 args.pubkey_hash = None
671 reactor.callWhenRunning(main, args)