Fixed a resource leak and incorrect exception handling in the nattraverso UPnP library.
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16
17 from twisted.internet import defer, reactor
18 from twisted.web import server, resource
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
21
22 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
23 from util import db, expiring_dict, jsonrpc, variable, deferral, math
24 from . import p2p, worker_interface, skiplists
25 import p2pool.data as p2pool
26 import p2pool as p2pool_init
27
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch current work and block height from bitcoind over JSON-RPC.

    Returns (via returnValue) a tuple of
    (bitcoin.getwork.BlockAttempt, height). Retries up to 3 times on
    failure (see the @deferral.retry decorator).
    """
    # Fire both RPCs concurrently before yielding on either.
    # a block could arrive in between these two queries
    getwork_df, height_df = bitcoind.rpc_getwork(), bitcoind.rpc_getblocknumber()
    try:
        # NOTE: the local name `getwork` shadows this function's own name
        # from here on; harmless because we return immediately after.
        getwork, height = bitcoin.getwork.BlockAttempt.from_getwork((yield getwork_df)), (yield height_df)
    finally:
        # get rid of residual errors: if one deferred failed, the other may
        # still fail later; swallow so Twisted doesn't log an unhandled error
        getwork_df.addErrback(lambda fail: None)
        height_df.addErrback(lambda fail: None)
    defer.returnValue((getwork, height))
40
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind (over its p2p interface) for a payout script via a
    checkorder/IP transaction.

    Returns the script on success, None if bitcoind denies the request,
    and raises ValueError on any other reply.
    """
    protocol = yield factory.getProtocol()
    response = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = response['reply']
    if reply == 'success':
        defer.returnValue(response['script'])
    elif reply == 'denied':
        defer.returnValue(None)
    else:
        raise ValueError('Unexpected reply: %r' % (response,))
51
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout-script creation: ask bitcoind (JSON-RPC) for the
    'p2pool' account address and convert it to a script."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
56
57 @defer.inlineCallbacks
58 def main(args):
59     try:
60         if args.charts:
61             from . import draw
62         
63         print 'p2pool (version %s)' % (p2pool_init.__version__,)
64         print
65         
66         # connect to bitcoind over JSON-RPC and do initial getwork
67         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
68         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
69         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
70         temp_work, temp_height = yield getwork(bitcoind)
71         print '    ...success!'
72         print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
73         print
74         
75         # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
76         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
77         factory = bitcoin.p2p.ClientFactory(args.net)
78         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
79         my_script = yield get_payout_script(factory)
80         if args.pubkey_hash is None:
81             if my_script is None:
82                 print '    IP transaction denied ... falling back to sending to address.'
83                 my_script = yield get_payout_script2(bitcoind, args.net)
84         else:
85             my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
86         print '    ...success!'
87         print '    Payout script:', my_script.encode('hex')
88         print
89         
90         ht = bitcoin.p2p.HeightTracker(factory)
91         
92         tracker = p2pool.OkayTracker(args.net)
93         chains = expiring_dict.ExpiringDict(300)
94         def get_chain(chain_id_data):
95             return chains.setdefault(chain_id_data, Chain(chain_id_data))
96         
97         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
98         
99         # information affecting work that should trigger a long-polling update
100         current_work = variable.Variable(None)
101         # information affecting work that should not trigger a long-polling update
102         current_work2 = variable.Variable(None)
103         
104         work_updated = variable.Event()
105         
106         requested = expiring_dict.ExpiringDict(300)
107         
108         @defer.inlineCallbacks
109         def set_real_work1():
110             work, height = yield getwork(bitcoind)
111             changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
112             current_work.set(dict(
113                 version=work.version,
114                 previous_block=work.previous_block,
115                 target=work.target,
116                 height=height,
117                 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
118             ))
119             current_work2.set(dict(
120                 clock_offset=time.time() - work.timestamp,
121             ))
122             if changed:
123                 set_real_work2()
124         
        def set_real_work2():
            # Re-evaluate the share tracker: pick the best share chain tip and
            # request any missing parent shares ("desired") from peers.
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            # Publish the new best tip (triggers long-poll updates for miners).
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            # NOTE(review): `t` is reused here for a different meaning (now the
            # current time, above it was the work dict).
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                # Exponential back-off per missing share: skip if we asked
                # recently (window grows 1.5x with each attempt).
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                # Candidate peers: anyone known to have a head descending from
                # this missing share, restricted to fully-connected peers.
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    # Mostly pick a random candidate, sometimes fall back to the
                    # original announcer (peer2) to avoid getting stuck.
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                # stops: our heads plus points slightly below them, so the peer
                # doesn't resend shares we already have (capped at 100 entries).
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
159         
160         print 'Initializing work...'
161         yield set_real_work1()
162         set_real_work2()
163         print '    ...success!'
164         
165         start_time = time.time() - current_work2.value['clock_offset']
166         
167         # setup p2p logic and join p2pool network
168         
169         def share_share(share, ignore_peer=None):
170             for peer in p2p_node.peers.itervalues():
171                 if peer is ignore_peer:
172                     continue
173                 #if p2pool_init.DEBUG:
174                 #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
175                 peer.send_shares([share])
176             share.flag_shared()
177         
        def p2p_shares(shares, peer=None):
            # Handle a batch of shares received from the p2p network (or from
            # our own worker via got_response, with peer=None). Adds new shares
            # to the tracker, submits any that qualify as full bitcoin blocks,
            # and re-evaluates the best chain if anything new arrived.
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                some_new = True
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                # A share that also meets the bitcoin block target is a real
                # block -- pass it to bitcoind over the p2p connection.
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            # Remember that this peer knows about this head, for later
            # parent-share requests in set_real_work2.
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
213         
        def p2p_share_hashes(share_hashes, peer):
            # Handle a share-hash announcement from a peer: request any hashes
            # we don't already have, with the same exponential back-off used in
            # set_real_work2 to avoid re-requesting too often.
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            # Record that this peer knows of the announced head.
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
231         
232         def p2p_get_shares(share_hashes, parents, stops, peer):
233             parents = min(parents, 1000//len(share_hashes))
234             stops = set(stops)
235             shares = []
236             for share_hash in share_hashes:
237                 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
238                     if share.hash in stops:
239                         break
240                     shares.append(share)
241             peer.send_shares(shares, full=True)
242         
243         print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
244         
245         def parse(x):
246             if ':' in x:
247                 ip, port = x.split(':')
248                 return ip, int(port)
249             else:
250                 return x, args.net.P2P_PORT
251         
252         nodes = set([
253             ('72.14.191.28', args.net.P2P_PORT),
254             ('62.204.197.159', args.net.P2P_PORT),
255             ('142.58.248.28', args.net.P2P_PORT),
256             ('94.23.34.145', args.net.P2P_PORT),
257         ])
258         for host in [
259             'p2pool.forre.st',
260             'dabuttonfactory.com',
261         ]:
262             try:
263                 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
264             except:
265                 log.err(None, 'Error resolving bootstrap node IP:')
266         
267         p2p_node = p2p.Node(
268             current_work=current_work,
269             port=args.p2pool_port,
270             net=args.net,
271             addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
272             mode=0 if args.low_bandwidth else 1,
273             preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
274         )
275         p2p_node.handle_shares = p2p_shares
276         p2p_node.handle_share_hashes = p2p_share_hashes
277         p2p_node.handle_get_shares = p2p_get_shares
278         
279         p2p_node.start()
280         
281         # send share when the chain changes to their chain
282         def work_changed(new_work):
283             #print 'Work changed:', new_work
284             for share in tracker.get_chain_known(new_work['best_share_hash']):
285                 if share.shared:
286                     break
287                 share_share(share, share.peer)
288         current_work.changed.watch(work_changed)
289         
290         print '    ...success!'
291         print
292         
293         @defer.inlineCallbacks
294         def upnp_thread():
295             while True:
296                 try:
297                     is_lan, lan_ip = yield ipdiscover.get_local_ip()
298                     if not is_lan:
299                         continue
300                     pm = yield portmapper.get_port_mapper()
301                     yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
302                 except:
303                     if p2pool_init.DEBUG:
304                         log.err(None, "UPnP error:")
305                 yield deferral.sleep(random.expovariate(1/120))
306         
307         if args.upnp:
308             upnp_thread()
309          
310         # start listening for workers with a JSON-RPC server
311         
312         print 'Listening for workers on port %i...' % (args.worker_port,)
313         
314         # setup worker logic
315         
316         merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
317         run_identifier = struct.pack('<Q', random.randrange(2**64))
318         
        def compute(state, payout_script):
            # Build a getwork BlockAttempt for a miner: assemble a generate
            # transaction (carrying p2pool share data in its coinbase) plus as
            # many good mempool transactions as fit, and hand back the header
            # attempt with the share target (target2) as the proof target.
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            # Greedily pack transactions up to ~500kB of serialized size.
            extra_txs = []
            size = 0
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                # block subsidy (halving every 210000 blocks) plus collected fees
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # kept for the ExpiringDict's timeout (created with 300s above) so got_response can find them
            
            # Timestamp from bitcoind's clock; bumped to satisfy the
            # median-time-past rule over the last 11 shares if needed.
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            # Record issue time keyed by nonce, so got_response can report share age.
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
362         
363         my_shares = set()
364         times = {}
365         
        def got_response(data):
            # Handle a solved work submission from a miner. Returns True for a
            # valid, on-time share; False otherwise. Never raises -- worker
            # submissions must not take the server down.
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                # Full bitcoin block found (or DEBUG mode forcing a submit).
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                # Check against the (easier) p2pool share target.
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Received invalid share from worker - %x/%x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                # NOTE(review): bare except is deliberate here as a boundary --
                # it logs and rejects rather than crashing the worker interface.
                log.err(None, 'Error processing data received from worker:')
                return False
399         
400         web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
401         
402         def get_rate():
403             if current_work.value['best_share_hash'] is not None:
404                 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
405                 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
406                 return json.dumps(att_s)
407             return json.dumps(None)
408         
409         def get_users():
410             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
411             weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
412             res = {}
413             for script in sorted(weights, key=lambda s: weights[s]):
414                 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
415             return json.dumps(res)
416         
417         class WebInterface(resource.Resource):
418             def __init__(self, func, mime_type):
419                 self.func, self.mime_type = func, mime_type
420             
421             def render_GET(self, request):
422                 request.setHeader('Content-Type', self.mime_type)
423                 return self.func()
424         
425         web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
426         web_root.putChild('users', WebInterface(get_users, 'application/json'))
427         if args.charts:
428             web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
429         
430         reactor.listenTCP(args.worker_port, server.Site(web_root))
431         
432         print '    ...success!'
433         print
434         
435         # done!
436         
437         tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
438         get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
439         
        class Tx(object):
            # Wrapper for a mempool transaction: tracks input/output values and
            # whether all of its parents are already confirmed in blocks.
            # (Currently dormant -- the new_tx watch below is disabled.)
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                # All hashes this tx involves: itself plus every input's source tx.
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                # Fire-and-forget deferred; updates the attributes above asynchronously.
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                # Resolve each input's source transaction via bitcoind; bail out
                # (leaving parents_all_in_blocks False) on any lookup failure or
                # any parent not yet in a block.
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined on this class or
                # anywhere visible in this file -- reaching this line would
                # raise AttributeError. Harmless while the new_tx watch is
                # disabled, but confirm against other revisions before enabling.
                x = self.is_good2()
                #print 'is_good:', x
                return x
476         
477         @defer.inlineCallbacks
478         def new_tx(tx_hash):
479             try:
480                 assert isinstance(tx_hash, (int, long))
481                 #print 'REQUESTING', tx_hash
482                 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
483                 #print 'GOT', tx
484                 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
485             except:
486                 log.err(None, 'Error handling tx:')
487         # disable for now, for testing impact on stales
488         #factory.new_tx.watch(new_tx)
489         
        def new_block(block_hash):
            # A new bitcoin block invalidates current work; wake work1_thread.
            work_updated.happened()
492         factory.new_block.watch(new_block)
493         
494         print 'Started successfully!'
495         print
496         
497         ht.updated.watch(set_real_work2)
498         
499         @defer.inlineCallbacks
500         def work1_thread():
501             while True:
502                 flag = work_updated.get_deferred()
503                 try:
504                     yield set_real_work1()
505                 except:
506                     log.err()
507                 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
508         
509         @defer.inlineCallbacks
510         def work2_thread():
511             while True:
512                 try:
513                     set_real_work2()
514                 except:
515                     log.err()
516                 yield deferral.sleep(random.expovariate(1/20))
517         
518         work1_thread()
519         work2_thread()
520         
521         counter = skiplists.CountsSkipList(tracker, run_identifier)
522         
523         while True:
524             yield deferral.sleep(3)
525             try:
526                 if current_work.value['best_share_hash'] is not None:
527                     height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
528                     if height > 5:
529                         att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
530                         weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
531                         matching_in_chain = counter(current_work.value['best_share_hash'], height)
532                         shares_in_chain = my_shares & matching_in_chain
533                         stale_shares = my_shares - matching_in_chain
534                         print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
535                             math.format(att_s),
536                             height,
537                             weights.get(my_script, 0)/total_weight*100,
538                             math.format(weights.get(my_script, 0)/total_weight*att_s),
539                             len(shares_in_chain) + len(stale_shares),
540                             len(stale_shares),
541                             len(p2p_node.peers),
542                         )
543                         #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
544                         #for k, v in weights.iteritems():
545                         #    print k.encode('hex'), v/total_weight
546             except:
547                 log.err()
548     except:
549         log.err(None, 'Fatal error:')
550         reactor.stop()
551
def run():
    """Parse command-line arguments, configure debug logging, and start the
    node (schedules main() and runs the Twisted reactor)."""
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # Positional (required) RPC credentials.
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        # In debug mode, tee stdout/stderr (and Twisted's log) to a
        # timestamped debug.log beside the script.
        p2pool_init.DEBUG = True
        class ReopeningFile(object):
            # File-like wrapper that can close and reopen its underlying file
            # (for log rotation via SIGUSR1).
            def __init__(self, *open_args, **open_kwargs):
                self.open_args, self.open_kwargs = open_args, open_kwargs
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def reopen(self):
                self.inner_file.close()
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def write(self, data):
                self.inner_file.write(data)
            def flush(self):
                self.inner_file.flush()
        class TeePipe(object):
            # Duplicates writes/flushes to several file-like outputs.
            def __init__(self, outputs):
                self.outputs = outputs
            def write(self, data):
                for output in self.outputs:
                    output.write(data)
            def flush(self):
                for output in self.outputs:
                    output.flush()
        class TimestampingPipe(object):
            # Prefixes each complete line with a wall-clock timestamp before
            # forwarding; partial lines are buffered until their newline.
            def __init__(self, inner_file):
                self.inner_file = inner_file
                self.buf = ''
                # softspace is required by the Python 2 print statement protocol.
                self.softspace = 0
            def write(self, data):
                buf = self.buf + data
                lines = buf.split('\n')
                for line in lines[:-1]:
                    self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                    self.inner_file.flush()
                self.buf = lines[-1]
            def flush(self):
                # Flushing happens per-line in write(); nothing buffered to push.
                pass
        logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
        sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
        if hasattr(signal, "SIGUSR1"):
            # SIGUSR1 rotates debug.log (close + reopen), for use with logrotate.
            def sigusr1(signum, frame):
                print '''Caught SIGUSR1, closing 'debug.log'...'''
                logfile.reopen()
                print '''...and reopened 'debug.log' after catching SIGUSR1.'''
            signal.signal(signal.SIGUSR1, sigusr1)
    
    # Fill in network-dependent port defaults.
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    # Validate -a/--address early so a typo fails before the node starts.
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    reactor.callWhenRunning(main, args)
    reactor.run()