Improved peer selection for downloading shares
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import itertools
7 import os
8 import random
9 import sqlite3
10 import struct
11 import sys
12 import time
13
14 from twisted.internet import defer, reactor
15 from twisted.web import server
16 from twisted.python import log
17
18 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
19 from util import db, expiring_dict, jsonrpc, variable, deferral, math, skiplist
20 from . import p2p, worker_interface
21 import p2pool.data as p2pool
22 import p2pool as p2pool_init
23
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch (BlockAttempt, block height) from bitcoind over JSON-RPC.

    Both RPCs are issued concurrently; note that a block could arrive in
    between the two queries, so the pair is only approximately consistent.
    """
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    try:
        work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
        height = yield height_df
    finally:
        # Swallow any residual failure on the deferreds so twisted does not
        # report an unhandled error if one of them fails after we bail out.
        work_df.addErrback(lambda fail: None)
        height_df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
36
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over its P2P interface for a script to send payouts to.

    Sends a null checkorder message. Returns the payout script on success,
    None if the IP transaction was denied (caller falls back to an address),
    and raises ValueError on any other reply.
    """
    res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
    if res['reply'] == 'success':
        my_script = res['script']
    elif res['reply'] == 'denied':
        my_script = None
    else:
        raise ValueError('Unexpected reply: %r' % (res,))
    # BUG FIX: the result was computed but never returned. An inlineCallbacks
    # generator must hand its value back via defer.returnValue; without this,
    # `yield get_payout_script(...)` always produced None, silently discarding
    # a successful checkorder reply and always triggering the fallback path.
    defer.returnValue(my_script)
47
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: pay to the address of bitcoind's 'p2pool' account."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
52
@defer.inlineCallbacks
def main(args):
    """Main entry point: connect to bitcoind, join the p2pool P2P network,
    and serve work to miners over JSON-RPC.

    Runs forever under the twisted reactor; on any fatal error it logs the
    failure and stops the reactor.
    """
    try:
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        temp_work, temp_height = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        # Explicit --address wins; otherwise use the checkorder result, and if
        # the IP transaction was denied fall back to an account address.
        if args.pubkey_hash is None:
            if my_script is None:
                print 'IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        ht = bitcoin.p2p.HeightTracker(factory)
        
        tracker = p2pool.OkayTracker(args.net)
        chains = expiring_dict.ExpiringDict(300)
        def get_chain(chain_id_data):
            # NOTE(review): `Chain` is not defined anywhere in this file; this
            # helper looks like dead code from an older design — confirm
            # before relying on it.
            return chains.setdefault(chain_id_data, Chain(chain_id_data))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        tracker_updated = variable.Event()
        
        # share_hash -> (last_request_time, request_count); used below to
        # rate-limit repeated getshares requests with exponential backoff
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            # Refresh the bitcoind side of current_work (block template data),
            # preserving whatever best_share_hash set_real_work2 last chose.
            work, height = yield getwork(bitcoind)
            # XXX call tracker_updated
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                clock_offset=time.time() - work.timestamp,
            ))
        
        @defer.inlineCallbacks
        def set_real_work2():
            # Ask the tracker for the best share and for shares it still wants,
            # then request those missing parents from suitable peers.
            best, desired = yield tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            for peer2, share_hash in desired:
                # Back off: skip if we asked recently (10 * 1.5**count window).
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < time.time() < last_request_time + 10 * 1.5**count:
                    continue
                # Candidate peers: anyone known to have a head descending from
                # this share, filtered to fully-connected peers.
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                # First attempt goes to the peer that announced the share;
                # retries usually pick a random knowledgeable peer instead.
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %x from %s' % (share_hash % 2**32, '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    # Stop at our known heads (and one share above each) so the
                    # peer doesn't resend shares we already have.
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    )),
                )
                requested[share_hash] = time.time(), count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        yield set_real_work2()
        print '    ...success!'
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            # Broadcast a share to every peer except the one it came from.
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            # Handle a batch of shares received from the network (or from our
            # own worker via got_response, which calls this with peer=None).
            if len(shares) > 5:
                print "Processing %i shares..." % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %x' % (share.hash % 2**32,)
                    continue
                some_new = True
                
                #print 'Received share %x from %r' % (share.hash % 2**32, share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                # A share that also meets the bitcoin target is a full block -
                # submit it to bitcoind immediately.
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %x bitcoin: %x' % (share.hash % 2**32, share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            # Remember that this peer knows about the first share's chain so
            # set_real_work2 can ask it for missing parents later.
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                tracker_updated.happened()
            
            if len(shares) > 5:
                print "... done processing %i shares." % (len(shares),)
        
        def p2p_share_hashes(share_hashes, peer):
            # A peer announced share hashes; request any we don't have yet.
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    pass # print 'Got share hash, already have, ignoring. Hash: %x' % (share_hash % 2**32,)
                else:
                    print 'Got share hash, requesting! Hash: %x' % (share_hash % 2**32,)
                    get_hashes.append(share_hash)
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            # Serve a peer's getshares request, capping the total response to
            # roughly 1000 shares split across the requested hashes.
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            # Parse "ADDR" or "ADDR:PORT" into an (addr, port) tuple.
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # Built-in bootstrap nodes, plus one resolved by DNS (best-effort).
        nodes = [
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
        ]
        try:
            nodes.append(((yield reactor.resolve('p2pool.forre.st')), args.net.P2P_PORT))
        except:
            print
            print 'Error resolving bootstrap node IP:'
            log.err()
            print
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=map(parse, args.p2pool_nodes) + nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            # Walk back from the new best share, broadcasting every share that
            # hasn't been shared yet (stop at the first already-shared one).
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        # merkle_root -> transaction list, kept so got_response can rebuild
        # the full block from a returned header
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # random tag baked into generated share nonces, used below to count
        # which shares in the chain are ours from this run
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        def compute(state, all_targets):
            """Build a getwork BlockAttempt for a miner from the current state."""
            # Pick memory-pool transactions to include, up to ~500kB.
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            extra_txs = []
            size = 0
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=my_script,
                # block subsidy (halving every 210000 blocks) plus fees
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            # Use bitcoind's clock, and never go below the median of the last
            # 11 share timestamps plus one.
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            if not all_targets:
                target2 = min(2**256//2**32 - 1, target2)
            # record when this work was handed out, keyed by nonce, so
            # got_response can report the round-trip time
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        my_shares = set()  # hashes of shares produced by our own workers
        times = {}  # share nonce -> time the corresponding getwork was sent
        
        def got_response(data):
            """Handle a solved getwork submission from a miner.

            Returns True if the submission produced a share extending the
            current best chain, False otherwise.
            """
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %x' % (hash_,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Received invalid share from worker - %x/%x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %x prev %x' % (share.hash % 2**32, 0 if share.previous_hash is None else share.previous_hash % 2**32), "DEAD ON ARRIVAL" if share.previous_hash != current_work.value['best_share_hash'] else "", time.time() - times[share.nonce], "s since getwork"
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                print
                print 'Error processing data received from worker:'
                log.err()
                print
                return False
        
        def get_rate():
            # Estimated pool hash rate, or None before the first share.
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
                return att_s
        
        reactor.listenTCP(args.worker_port, server.Site(worker_interface.WorkerInterface(current_work, compute, got_response, get_rate)))
        
        print '    ...success!'
        print
        
        # done!
        
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        class Tx(object):
            # Wrapper around a memory-pool transaction that tracks whether all
            # of its inputs have been confirmed in blocks.
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                # hashes this tx is related to: itself plus every input's source tx
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                # Asynchronously fetch each input's source transaction; give up
                # quietly if any lookup fails or a parent is unconfirmed.
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined in this class or file;
                # this path would raise AttributeError if reached — confirm
                # against the rest of the project.
                x = self.is_good2()
                #print 'is_good:', x
                return x
        
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            # Fetch an announced transaction from bitcoind and add it to tx_pool.
            try:
                assert isinstance(tx_hash, (int, long))
                #print "REQUESTING", tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print "GOT", tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                print
                print 'Error handling tx:'
                log.err()
                print
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        @defer.inlineCallbacks
        def work1_thread():
            # Periodically (and on new-block events) refresh bitcoind work.
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                # wake on the event or after ~1s (exponentially distributed)
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/1))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            # Periodically (and on tracker updates) re-evaluate the best share.
            while True:
                flag = tracker_updated.get_deferred()
                try:
                    yield set_real_work2()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/1))], fireOnOneCallback=True)
        
        work1_thread()
        work2_thread()
        
        counter = skiplist.CountsSkipList(tracker, my_script, run_identifier)
        
        # Status loop: print pool/share statistics roughly once a second.
        while True:
            yield deferral.sleep(random.expovariate(1/1))
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 5:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        count = counter(current_work.value['best_share_hash'], height, 2**100)
                        print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale)' % (
                            math.format(att_s),
                            height,
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            len(my_shares),
                            len(my_shares) - count,
                        )
                        #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
                        #for k, v in weights.iteritems():
                        #    print k.encode('hex'), v/total_weight
            except:
                log.err()
    except:
        print
        print 'Fatal error:'
        log.err()
        print
        reactor.stop()
494
495 def run():
496     parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
497     parser.add_argument('--version', action='version', version=p2pool_init.__version__)
498     parser.add_argument('--testnet',
499         help='use the testnet',
500         action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
501     parser.add_argument('--debug',
502         help='debugging mode',
503         action='store_const', const=True, default=False, dest='debug')
504     parser.add_argument('-a', '--address',
505         help='generate to this address (defaults to requesting one from bitcoind)',
506         type=str, action='store', default=None, dest='address')
507     
508     p2pool_group = parser.add_argument_group('p2pool interface')
509     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
510         help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
511         type=int, action='store', default=None, dest='p2pool_port')
512     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
513         help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
514         type=str, action='append', default=[], dest='p2pool_nodes')
515     parser.add_argument('-l', '--low-bandwidth',
516         help='trade lower bandwidth usage for higher latency (reduced efficiency)',
517         action='store_true', default=False, dest='low_bandwidth')
518     
519     worker_group = parser.add_argument_group('worker interface')
520     worker_group.add_argument('-w', '--worker-port', metavar='PORT',
521         help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
522         type=int, action='store', default=9332, dest='worker_port')
523     
524     bitcoind_group = parser.add_argument_group('bitcoind interface')
525     bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
526         help='connect to a bitcoind at this address (default: 127.0.0.1)',
527         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
528     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
529         help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
530         type=int, action='store', default=8332, dest='bitcoind_rpc_port')
531     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
532         help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
533         type=int, action='store', default=None, dest='bitcoind_p2p_port')
534     
535     bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
536         help='bitcoind RPC interface username',
537         type=str, action='store', dest='bitcoind_rpc_username')
538     bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
539         help='bitcoind RPC interface password',
540         type=str, action='store', dest='bitcoind_rpc_password')
541     
542     args = parser.parse_args()
543     
544     if args.debug:
545         p2pool_init.DEBUG = True
546         class TimestampingPipe(object):
547             def __init__(self, inner_file):
548                 self.inner_file = inner_file
549                 self.buf = ""
550             def write(self, data):
551                 buf = self.buf + data
552                 lines = buf.split('\n')
553                 for line in lines[:-1]:
554                     self.inner_file.write("%s %s\n" % (time.strftime("%H:%M:%S"), line))
555                 self.buf = lines[-1]
556         sys.stdout = TimestampingPipe(sys.stdout)
557         sys.stderr = TimestampingPipe(sys.stderr)
558     
559     if args.bitcoind_p2p_port is None:
560         args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
561     
562     if args.p2pool_port is None:
563         args.p2pool_port = args.net.P2P_PORT
564     
565     if args.address is not None:
566         try:
567             args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
568         except Exception, e:
569             raise ValueError("error parsing address: " + repr(e))
570     else:
571         args.pubkey_hash = None
572     
573     reactor.callWhenRunning(main, args)
574     reactor.run()