3 from __future__ import division
17 from twisted.internet import defer, reactor
18 from twisted.web import server, resource
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
22 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
23 from util import db, expiring_dict, jsonrpc, variable, deferral, math
24 from . import p2p, worker_interface, skiplists
25 import p2pool.data as p2pool
26 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current work and block height from bitcoind.

    Fires both RPCs before yielding on either, so the two answers are taken
    as close together in time as possible (a block can still arrive between
    them). Returns a (BlockAttempt, height) pair through the deferred.
    Retries up to 3 times on failure via the deferral.retry decorator.
    """
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
    height = yield height_df
    # Attach no-op errbacks so any residual failure on the already-consumed
    # deferreds is not reported as an unhandled error.
    work_df.addErrback(lambda fail: None)
    height_df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind, over the p2p checkorder mechanism, for a payout script.

    Returns the script on a 'success' reply, None when the IP transaction
    is 'denied' (caller falls back to an address-based script), and raises
    ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    # defer.returnValue raises, so each guard below terminates the generator;
    # the final raise is only reached for unrecognized replies.
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build a payout script from bitcoind's 'p2pool' account address.

    Fallback path used when the checkorder-based get_payout_script is
    denied: asks bitcoind for an address via getaccountaddress, then
    converts address -> pubkey hash -> script2.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
57 @defer.inlineCallbacks
63 print 'p2pool (version %s)' % (p2pool_init.__version__,)
66 # connect to bitcoind over JSON-RPC and do initial getwork
67 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
68 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
69 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
70 temp_work, temp_height = yield getwork(bitcoind)
72 print ' Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
75 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
76 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
77 factory = bitcoin.p2p.ClientFactory(args.net)
78 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
79 my_script = yield get_payout_script(factory)
80 if args.pubkey_hash is None:
82 print ' IP transaction denied ... falling back to sending to address.'
83 my_script = yield get_payout_script2(bitcoind, args.net)
85 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
87 print ' Payout script:', my_script.encode('hex')
90 ht = bitcoin.p2p.HeightTracker(factory)
92 tracker = p2pool.OkayTracker(args.net)
93 chains = expiring_dict.ExpiringDict(300)
94 def get_chain(chain_id_data):
95 return chains.setdefault(chain_id_data, Chain(chain_id_data))
97 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
99 # information affecting work that should trigger a long-polling update
100 current_work = variable.Variable(None)
101 # information affecting work that should not trigger a long-polling update
102 current_work2 = variable.Variable(None)
104 work_updated = variable.Event()
106 requested = expiring_dict.ExpiringDict(300)
108 @defer.inlineCallbacks
109 def set_real_work1():
110 work, height = yield getwork(bitcoind)
111 changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
112 current_work.set(dict(
113 version=work.version,
114 previous_block=work.previous_block,
117 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
119 current_work2.set(dict(
120 clock_offset=time.time() - work.timestamp,
125 def set_real_work2():
126 best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
128 t = dict(current_work.value)
129 t['best_share_hash'] = best
133 for peer2, share_hash in desired:
134 if share_hash not in tracker.tails: # was received in the time tracker.think was running
136 last_request_time, count = requested.get(share_hash, (None, 0))
137 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
139 potential_peers = set()
140 for head in tracker.tails[share_hash]:
141 potential_peers.update(peer_heads.get(head, set()))
142 potential_peers = [peer for peer in potential_peers if peer.connected2]
143 if count == 0 and peer2 is not None and peer2.connected2:
146 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
150 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
154 stops=list(set(tracker.heads) | set(
155 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
158 requested[share_hash] = t, count + 1
160 print 'Initializing work...'
161 yield set_real_work1()
165 start_time = time.time() - current_work2.value['clock_offset']
167 # setup p2p logic and join p2pool network
169 def share_share(share, ignore_peer=None):
170 for peer in p2p_node.peers.itervalues():
171 if peer is ignore_peer:
173 #if p2pool_init.DEBUG:
174 # print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
175 peer.send_shares([share])
178 def p2p_shares(shares, peer=None):
180 print 'Processing %i shares...' % (len(shares),)
184 if share.hash in tracker.shares:
185 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
189 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
192 #for peer2, share_hash in desired:
193 # print 'Requesting parent share %x' % (share_hash,)
194 # peer2.send_getshares(hashes=[share_hash], parents=2000)
196 if share.bitcoin_hash <= share.header['target']:
198 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
200 if factory.conn.value is not None:
201 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
203 print 'No bitcoind connection! Erp!'
205 if shares and peer is not None:
206 peer_heads.setdefault(shares[0].hash, set()).add(peer)
212 print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
214 def p2p_share_hashes(share_hashes, peer):
217 for share_hash in share_hashes:
218 if share_hash in tracker.shares:
220 last_request_time, count = requested.get(share_hash, (None, 0))
221 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
223 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
224 get_hashes.append(share_hash)
225 requested[share_hash] = t, count + 1
227 if share_hashes and peer is not None:
228 peer_heads.setdefault(share_hashes[0], set()).add(peer)
230 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
232 def p2p_get_shares(share_hashes, parents, stops, peer):
233 parents = min(parents, 1000//len(share_hashes))
236 for share_hash in share_hashes:
237 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
238 if share.hash in stops:
241 peer.send_shares(shares, full=True)
243 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
247 ip, port = x.split(':')
250 return x, args.net.P2P_PORT
253 ('72.14.191.28', args.net.P2P_PORT),
254 ('62.204.197.159', args.net.P2P_PORT),
255 ('142.58.248.28', args.net.P2P_PORT),
256 ('94.23.34.145', args.net.P2P_PORT),
260 'dabuttonfactory.com',
263 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
265 log.err(None, 'Error resolving bootstrap node IP:')
268 current_work=current_work,
269 port=args.p2pool_port,
271 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
272 mode=0 if args.low_bandwidth else 1,
273 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
275 p2p_node.handle_shares = p2p_shares
276 p2p_node.handle_share_hashes = p2p_share_hashes
277 p2p_node.handle_get_shares = p2p_get_shares
281 # send share when the chain changes to their chain
282 def work_changed(new_work):
283 #print 'Work changed:', new_work
284 for share in tracker.get_chain_known(new_work['best_share_hash']):
287 share_share(share, share.peer)
288 current_work.changed.watch(work_changed)
293 @defer.inlineCallbacks
297 is_lan, lan_ip = yield ipdiscover.get_local_ip()
300 pm = yield portmapper.get_port_mapper()
301 yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
303 if p2pool_init.DEBUG:
304 log.err(None, "UPnP error:")
305 yield deferral.sleep(random.expovariate(1/120))
310 # start listening for workers with a JSON-RPC server
312 print 'Listening for workers on port %i...' % (args.worker_port,)
316 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
317 run_identifier = struct.pack('<Q', random.randrange(2**64))
319 def compute(state, payout_script):
320 if payout_script is None:
321 payout_script = my_script
322 if state['best_share_hash'] is None and args.net.PERSIST:
323 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
324 pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
325 pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
328 for tx in pre_extra_txs:
329 this_size = len(bitcoin.data.tx_type.pack(tx.tx))
330 if size + this_size > 500000:
335 # XXX assuming generate_tx is smallish here..
336 generate_tx = p2pool.generate_transaction(
338 previous_share_hash=state['best_share_hash'],
339 new_script=payout_script,
340 subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
341 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
342 block_target=state['target'],
345 print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
346 #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
347 #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
348 transactions = [generate_tx] + [tx.tx for tx in extra_txs]
349 merkle_root = bitcoin.data.merkle_hash(transactions)
350 merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
352 timestamp = int(time.time() - current_work2.value['clock_offset'])
353 if state['best_share_hash'] is not None:
354 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
355 if timestamp2 > timestamp:
356 print 'Toff', timestamp2 - timestamp
357 timestamp = timestamp2
358 target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
359 times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
360 #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
361 return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
366 def got_response(data):
368 # match up with transactions
369 header = bitcoin.getwork.decode_data(data)
370 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
371 if transactions is None:
372 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
374 block = dict(header=header, txs=transactions)
375 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
376 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
377 if factory.conn.value is not None:
378 factory.conn.value.send_block(block=block)
380 print 'No bitcoind connection! Erp!'
381 if hash_ <= block['header']['target']:
383 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
385 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
387 print 'Received invalid share from worker - %x/%x' % (hash_, target)
389 share = p2pool.Share.from_block(block)
390 my_shares.add(share.hash)
391 print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
392 good = share.previous_hash == current_work.value['best_share_hash']
393 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
395 # eg. good = share.hash == current_work.value['best_share_hash'] here
398 log.err(None, 'Error processing data received from worker:')
401 web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
404 if current_work.value['best_share_hash'] is not None:
405 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
406 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
407 return json.dumps(att_s)
408 return json.dumps(None)
411 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
412 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
414 for script in sorted(weights, key=lambda s: weights[s]):
415 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
416 return json.dumps(res)
418 class WebInterface(resource.Resource):
419 def __init__(self, func, mime_type):
420 self.func, self.mime_type = func, mime_type
422 def render_GET(self, request):
423 request.setHeader('Content-Type', self.mime_type)
426 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
427 web_root.putChild('users', WebInterface(get_users, 'application/json'))
429 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
431 reactor.listenTCP(args.worker_port, server.Site(web_root))
438 tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
439 get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
442 def __init__(self, tx, seen_at_block):
443 self.hash = bitcoin.data.tx_type.hash256(tx)
445 self.seen_at_block = seen_at_block
446 self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
448 #print '%x %r' % (seen_at_block, tx)
449 #for mention in self.mentions:
450 # print '%x' % mention
452 self.parents_all_in_blocks = False
455 self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
456 self._find_parents_in_blocks()
458 @defer.inlineCallbacks
459 def _find_parents_in_blocks(self):
460 for tx_in in self.tx['tx_ins']:
462 raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
465 self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
466 #print raw_transaction
467 if not raw_transaction['parent_blocks']:
469 self.parents_all_in_blocks = True
472 if not self.parents_all_in_blocks:
478 @defer.inlineCallbacks
481 assert isinstance(tx_hash, (int, long))
482 #print 'REQUESTING', tx_hash
483 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
485 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
487 log.err(None, 'Error handling tx:')
488 # disable for now, for testing impact on stales
489 #factory.new_tx.watch(new_tx)
491 def new_block(block_hash):
492 work_updated.happened()
493 factory.new_block.watch(new_block)
495 print 'Started successfully!'
498 ht.updated.watch(set_real_work2)
500 @defer.inlineCallbacks
503 flag = work_updated.get_deferred()
505 yield set_real_work1()
508 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
510 @defer.inlineCallbacks
517 yield deferral.sleep(random.expovariate(1/20))
522 counter = skiplists.CountsSkipList(tracker, run_identifier)
525 yield deferral.sleep(3)
527 if current_work.value['best_share_hash'] is not None:
528 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
530 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
531 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
532 matching_in_chain = counter(current_work.value['best_share_hash'], height)
533 shares_in_chain = my_shares & matching_in_chain
534 stale_shares = my_shares - matching_in_chain
535 print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
538 weights.get(my_script, 0)/total_weight*100,
539 math.format(weights.get(my_script, 0)/total_weight*att_s),
540 len(shares_in_chain) + len(stale_shares),
543 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
544 #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
545 #for k, v in weights.iteritems():
546 # print k.encode('hex'), v/total_weight
550 log.err(None, 'Fatal error:')
554 parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
555 parser.add_argument('--version', action='version', version=p2pool_init.__version__)
556 parser.add_argument('--testnet',
557 help='use the testnet',
558 action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
559 parser.add_argument('--debug',
560 help='debugging mode',
561 action='store_const', const=True, default=False, dest='debug')
562 parser.add_argument('-a', '--address',
563 help='generate to this address (defaults to requesting one from bitcoind)',
564 type=str, action='store', default=None, dest='address')
565 parser.add_argument('--charts',
566 help='generate charts on the web interface (requires PIL and pygame)',
567 action='store_const', const=True, default=False, dest='charts')
569 p2pool_group = parser.add_argument_group('p2pool interface')
570 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
571 help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
572 type=int, action='store', default=None, dest='p2pool_port')
573 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
574 help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
575 type=str, action='append', default=[], dest='p2pool_nodes')
576 parser.add_argument('-l', '--low-bandwidth',
577 help='trade lower bandwidth usage for higher latency (reduced efficiency)',
578 action='store_true', default=False, dest='low_bandwidth')
579 parser.add_argument('--disable-upnp',
580 help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
581 action='store_false', default=True, dest='upnp')
583 worker_group = parser.add_argument_group('worker interface')
584 worker_group.add_argument('-w', '--worker-port', metavar='PORT',
585 help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
586 type=int, action='store', default=9332, dest='worker_port')
588 bitcoind_group = parser.add_argument_group('bitcoind interface')
589 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
590 help='connect to a bitcoind at this address (default: 127.0.0.1)',
591 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
592 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
593 help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
594 type=int, action='store', default=8332, dest='bitcoind_rpc_port')
595 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
596 help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
597 type=int, action='store', default=None, dest='bitcoind_p2p_port')
599 bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
600 help='bitcoind RPC interface username',
601 type=str, action='store', dest='bitcoind_rpc_username')
602 bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
603 help='bitcoind RPC interface password',
604 type=str, action='store', dest='bitcoind_rpc_password')
606 args = parser.parse_args()
609 p2pool_init.DEBUG = True
610 class ReopeningFile(object):
611 def __init__(self, *open_args, **open_kwargs):
612 self.open_args, self.open_kwargs = open_args, open_kwargs
613 self.inner_file = open(*self.open_args, **self.open_kwargs)
615 self.inner_file.close()
616 self.inner_file = open(*self.open_args, **self.open_kwargs)
617 def write(self, data):
618 self.inner_file.write(data)
620 self.inner_file.flush()
621 class TeePipe(object):
622 def __init__(self, outputs):
623 self.outputs = outputs
624 def write(self, data):
625 for output in self.outputs:
628 for output in self.outputs:
630 class TimestampingPipe(object):
631 def __init__(self, inner_file):
632 self.inner_file = inner_file
635 def write(self, data):
636 buf = self.buf + data
637 lines = buf.split('\n')
638 for line in lines[:-1]:
639 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
640 self.inner_file.flush()
644 logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
645 sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
646 if hasattr(signal, "SIGUSR1"):
647 def sigusr1(signum, frame):
648 print '''Caught SIGUSR1, closing 'debug.log'...'''
650 print '''...and reopened 'debug.log' after catching SIGUSR1.'''
651 signal.signal(signal.SIGUSR1, sigusr1)
653 if args.bitcoind_p2p_port is None:
654 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
656 if args.p2pool_port is None:
657 args.p2pool_port = args.net.P2P_PORT
659 if args.address is not None:
661 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
663 raise ValueError('error parsing address: ' + repr(e))
665 args.pubkey_hash = None
667 reactor.callWhenRunning(main, args)