# import, naming, whitespace cleanup
# [p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import itertools
7 import os
8 import random
9 import sqlite3
10 import struct
11 import sys
12 import time
13
14 from twisted.internet import defer, reactor
15 from twisted.web import server
16 from twisted.python import log
17
18 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
19 from util import db, expiring_dict, jsonrpc, variable, deferral, math, skiplist
20 from . import p2p, worker_interface
21 import p2pool.data as p2pool
22 import p2pool as p2pool_init
23
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current block template and chain height from bitcoind.

    Both RPCs are issued concurrently, so a block could still arrive in
    between the two responses - the pair is not guaranteed consistent.
    Fires with a (BlockAttempt, height) tuple.
    """
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    try:
        work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
        height = yield height_df
    finally:
        # swallow any leftover errbacks so twisted doesn't report them as unhandled
        work_df.addErrback(lambda fail: None)
        height_df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
36
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask the local bitcoind for a payout script via a checkorder (IP
    transaction) request over the bitcoin P2P connection.

    Fires with the script as a byte string, or None if bitcoind denied
    the request. Raises ValueError on any other reply.
    """
    res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
    if res['reply'] == 'success':
        my_script = res['script']
    elif res['reply'] == 'denied':
        my_script = None
    else:
        raise ValueError('Unexpected reply: %r' % (res,))
    # bug fix: my_script was computed but never handed back - an
    # inlineCallbacks generator must use defer.returnValue, otherwise the
    # deferred fires with None and the caller always takes the
    # get_payout_script2 address fallback even when checkorder succeeded
    defer.returnValue(my_script)
47
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fall back to a plain pay-to-address script.

    Asks bitcoind (over JSON-RPC) for the 'p2pool' account's address and
    converts it into an output script for the given network.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
52
@defer.inlineCallbacks
def main(args):
    """Run the p2pool node.

    Connects to bitcoind over JSON-RPC and P2P, joins the p2pool share
    network, serves work to miners over JSON-RPC, and then loops printing
    pool statistics. args is the namespace produced by run(). Any fatal
    error is logged and stops the reactor.
    """
    try:
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        temp_work, temp_height = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        if args.pubkey_hash is None:
            if my_script is None:
                print 'IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            # an explicit --address overrides whatever bitcoind offered
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        # tracks bitcoin block heights via the P2P connection
        ht = bitcoin.p2p.HeightTracker(factory)
        
        tracker = p2pool.OkayTracker(args.net)
        chains = expiring_dict.ExpiringDict(300)
        def get_chain(chain_id_data):
            # NOTE(review): Chain is not defined anywhere in this file - this
            # helper looks vestigial and would raise NameError if ever called
            return chains.setdefault(chain_id_data, Chain(chain_id_data))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        tracker_updated = variable.Event()
        
        # share_hash -> last request timestamp, used to rate-limit getshares
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            # refresh the bitcoin-side work (block template + height) from bitcoind
            work, height = yield getwork(bitcoind)
            # XXX call tracker_updated
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                clock_offset=time.time() - work.timestamp,
            ))
        
        @defer.inlineCallbacks
        def set_real_work2():
            # recompute the best share to build on and request missing parents
            best, desired = yield tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            for peer2, share_hash in desired:
                # skip shares that were requested within the last few seconds
                last_request_time = requested.get(share_hash, None)
                if last_request_time is not None and last_request_time - 5 < time.time() < last_request_time + 10:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                # half the time ask a random peer that advertised the chain,
                # otherwise ask the peer that suggested the share
                peer = random.choice(potential_peers) if potential_peers and random.random() > .5 else peer2
                if peer is None:
                    continue
                
                print 'Requesting parent share %x from %s' % (share_hash % 2**32, '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    )),
                )
                requested[share_hash] = time.time()
        
        print 'Initializing work...'
        yield set_real_work1()
        yield set_real_work2()
        print '    ...success!'
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            # broadcast a share to every peer except the one it came from
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            # handle a batch of shares received from the network (or mined locally)
            if len(shares) > 5:
                print "Processing %i shares..." % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %x' % (share.hash % 2**32,)
                    continue
                some_new = True
                
                #print 'Received share %x from %r' % (share.hash % 2**32, share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                # a share that also meets the bitcoin target is a full block -
                # submit it to bitcoind over the P2P connection
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %x bitcoin: %x' % (share.hash % 2**32, share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            if shares and peer is not None:
                # remember that this peer knows about this chain
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                tracker_updated.happened()
            
            if len(shares) > 5:
                print "... done processing %i shares." % (len(shares),)
        
        def p2p_share_hashes(share_hashes, peer):
            # handle advertised share hashes; request the ones we don't have
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    pass # print 'Got share hash, already have, ignoring. Hash: %x' % (share_hash % 2**32,)
                else:
                    print 'Got share hash, requesting! Hash: %x' % (share_hash % 2**32,)
                    get_hashes.append(share_hash)
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            # serve a peer's getshares request, capping total shares sent at ~1000
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            # parse 'addr' or 'addr:port' into an (addr, port) tuple
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # hard-coded bootstrap nodes, plus a DNS-resolved one (best effort)
        nodes = [
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
        ]
        try:
            nodes.append(((yield reactor.resolve('p2pool.forre.st')), args.net.P2P_PORT))
        except:
            print
            print 'Error resolving bootstrap node IP:'
            log.err()
            print
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            # persist known peer addresses next to the script
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=map(parse, args.p2pool_nodes) + nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            # walk back from the new best share, broadcasting anything not yet shared
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # random tag mixed into our coinbase nonces so our own shares are recognizable
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        def compute(state, all_targets):
            # build a BlockAttempt (getwork response) for a miner from the
            # current pool state
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            extra_txs = []
            size = 0
            # greedily include mempool transactions up to a 500kB budget
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=my_script,
                # block subsidy (halving every 210000 blocks) plus collected fees
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                # don't let the timestamp fall below median-of-last-11 + 1
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            if not all_targets:
                # clamp the share target for miners that can't handle high targets
                target2 = min(2**256//2**32 - 1, target2)
            # record send time keyed by nonce so latency can be printed on submit
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        my_shares = set()
        times = {}
        
        def got_response(data):
            # handle a solved getwork submission from a miner; returns True
            # only if the resulting share builds on the current best share
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                # meets the bitcoin target (or DEBUG): submit as a full block
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %x' % (hash_,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Received invalid share from worker - %x/%x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %x prev %x' % (share.hash % 2**32, 0 if share.previous_hash is None else share.previous_hash % 2**32), "DEAD ON ARRIVAL" if share.previous_hash != current_work.value['best_share_hash'] else "", time.time() - times[share.nonce], "s since getwork"
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                print
                print 'Error processing data received from worker:'
                log.err()
                print
                return False
        
        def get_rate():
            # pool attempts-per-second estimate, or None before any share exists
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
                return att_s
        
        reactor.listenTCP(args.worker_port, server.Site(worker_interface.WorkerInterface(current_work, compute, got_response, get_rate)))
        
        print '    ...success!'
        print
        
        # done!
        
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        class Tx(object):
            # wraps a mempool transaction, tracking its input/output value and
            # whether all of its parents are confirmed in blocks
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                # kicks off async lookup; result lands in the attributes above
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                # asynchronously total value_in and verify every input's parent
                # transaction is in a block; bail out quietly on any failure
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined in this class or
                # anywhere visible in this file - this would raise
                # AttributeError if reached; confirm before re-enabling new_tx
                x = self.is_good2()
                #print 'is_good:', x
                return x
        
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            # fetch a newly-announced transaction from bitcoind and pool it
            try:
                assert isinstance(tx_hash, (int, long))
                #print "REQUESTING", tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print "GOT", tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                print
                print 'Error handling tx:'
                log.err()
                print
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            # a new bitcoin block invalidates current work
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        @defer.inlineCallbacks
        def work1_thread():
            # periodically poll bitcoind for new work, waking early on new blocks
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/1))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            # periodically recompute the best share, waking early on new shares
            while True:
                flag = tracker_updated.get_deferred()
                try:
                    yield set_real_work2()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/1))], fireOnOneCallback=True)
        
        work1_thread()
        work2_thread()
        
        # counts our own recent shares (tagged with run_identifier) for stats
        counter = skiplist.CountsSkipList(tracker, my_script, run_identifier)
        
        # main status loop: print pool statistics roughly once a second
        while True:
            yield deferral.sleep(random.expovariate(1/1))
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 5:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        count = counter(current_work.value['best_share_hash'], height, 2**100)
                        print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale)' % (
                            math.format(att_s),
                            height,
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            len(my_shares),
                            len(my_shares) - count,
                        )
                        #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
                        #for k, v in weights.iteritems():
                        #    print k.encode('hex'), v/total_weight
            except:
                log.err()
    except:
        print
        print 'Fatal error:'
        log.err()
        print
        reactor.stop()
491
def run():
    """Parse command-line arguments, apply defaults, and start the node.

    Entry point: builds the argparse parser, normalizes port/address
    arguments, optionally installs timestamping stdout/stderr wrappers in
    debug mode, then schedules main() and runs the twisted reactor.
    """
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # positional arguments: RPC username then password, in that order
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
        class TimestampingPipe(object):
            # file-like wrapper that prefixes each complete output line with
            # an HH:MM:SS timestamp; used for stdout/stderr in debug mode
            def __init__(self, inner_file):
                self.inner_file = inner_file
                # holds any trailing partial line between write() calls
                self.buf = ""
            def write(self, data):
                buf = self.buf + data
                lines = buf.split('\n')
                for line in lines[:-1]:
                    self.inner_file.write("%s %s\n" % (time.strftime("%H:%M:%S"), line))
                self.buf = lines[-1]
        sys.stdout = TimestampingPipe(sys.stdout)
        sys.stderr = TimestampingPipe(sys.stderr)
    
    # fill in network-dependent port defaults
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError("error parsing address: " + repr(e))
    else:
        args.pubkey_hash = None
    
    reactor.callWhenRunning(main, args)
    reactor.run()