Display file-descriptor stats when debugging mode is enabled
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16
17 from twisted.internet import defer, reactor
18 from twisted.web import server, resource
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
21
22 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
23 from util import db, expiring_dict, jsonrpc, variable, deferral, math
24 from . import p2p, worker_interface, skiplists
25 import p2pool.data as p2pool
26 import p2pool as p2pool_init
27
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current work unit and block height from bitcoind.

    Both RPC calls are issued concurrently; a block could still arrive
    between the two responses, so the pair is only loosely consistent.
    Returns a (BlockAttempt, height) tuple via the deferred.
    """
    work_deferred = bitcoind.rpc_getwork()
    height_deferred = bitcoind.rpc_getblocknumber()
    try:
        work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_deferred))
        height = yield height_deferred
    finally:
        # Attach no-op errbacks so a failure on the deferred we did not get
        # to consume is not reported as an unhandled error.
        for d in (work_deferred, height_deferred):
            d.addErrback(lambda fail: None)
    defer.returnValue((work, height))
40
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over its p2p connection for a payout script.

    Sends a null checkorder request; returns the script on 'success',
    None on 'denied', and raises ValueError for anything else.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
51
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: pay to bitcoind's 'p2pool' account address."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
56
@defer.inlineCallbacks
def main(args):
    """Run the p2pool node.

    Connects to bitcoind over JSON-RPC and bitcoin-p2p, joins the p2pool
    share network, serves work to miners over a JSON-RPC worker interface,
    and then loops forever printing pool statistics every few seconds.
    Runs as an inlineCallbacks coroutine under the Twisted reactor; any
    fatal error is logged and stops the reactor.
    """
    try:
        if args.charts:
            from . import draw
        
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        temp_work, temp_height = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        if args.pubkey_hash is None:
            if my_script is None:
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        ht = bitcoin.p2p.HeightTracker(factory)
        
        tracker = p2pool.OkayTracker(args.net)
        chains = expiring_dict.ExpiringDict(300)
        # NOTE(review): 'Chain' is not defined or imported anywhere in this
        # module, so calling get_chain would raise NameError. Looks like dead
        # code left over from an older chain-based design - confirm and remove.
        def get_chain(chain_id_data):
            return chains.setdefault(chain_id_data, Chain(chain_id_data))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        # share_hash -> (last request time, request count); drives the
        # exponential backoff when repeatedly requesting the same share
        requested = expiring_dict.ExpiringDict(300)
        
        # Poll bitcoind for fresh work; if the previous-block hash changed,
        # re-run the share-chain bookkeeping in set_real_work2.
        @defer.inlineCallbacks
        def set_real_work1():
            work, height = yield getwork(bitcoind)
            changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                clock_offset=time.time() - work.timestamp,
            ))
            if changed:
                set_real_work2()
        
        # Recompute the best share head and request any missing parent
        # shares (the tracker's 'desired' list) from suitable peers.
        def set_real_work2():
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                # exponential backoff: skip if we asked for this share recently
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        
        # NOTE(review): start_time appears unused in the rest of this function.
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        # Broadcast a share to every connected peer except the one it
        # came from, then mark it as shared.
        def share_share(share, ignore_peer=None):
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.send_shares([share])
            share.flag_shared()
        
        # p2p handler: shares received from the network (or locally via
        # got_response). Adds new shares to the tracker and submits any
        # share that also solves a bitcoin block to bitcoind.
        def p2p_shares(shares, peer=None):
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                some_new = True
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        # p2p handler: peers advertising share hashes; request the ones we
        # do not have yet, with the same backoff scheme as set_real_work2.
        def p2p_share_hashes(share_hashes, peer):
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        # p2p handler: a peer requesting shares from us; walk each requested
        # chain up to 'parents' ancestors, stopping at any hash in 'stops'.
        def p2p_get_shares(share_hashes, parents, stops, peer):
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        # Parse 'addr' or 'addr:port' into an (ip, port) tuple, defaulting
        # the port to the network's standard p2pool port.
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # hard-coded bootstrap peers, plus DNS-resolved bootstrap hosts below
        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        # Periodically (re)establish a UPnP port mapping for the p2p port.
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if not is_lan:
                        # NOTE(review): this 'continue' skips the sleep at the
                        # bottom of the loop, so a non-LAN host retries
                        # get_local_ip in a tight loop - confirm intended.
                        continue
                    pm = yield portmapper.get_port_mapper()
                    yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
                except:
                    if p2pool_init.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # random per-run tag embedded in generated coinbase nonces so our own
        # shares can be recognized later (see CountsSkipList below)
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        # Build a block attempt (getwork job) for a miner: pick mempool
        # transactions, create the p2pool generate transaction, and remember
        # the merkle root -> transactions mapping so got_response can
        # reassemble the full block when the work comes back.
        def compute(state, payout_script):
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            extra_txs = []
            size = 0
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            # clamp the timestamp so it is not earlier than the median of the
            # last 11 shares (analogous to bitcoin's median-time-past rule)
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        # hashes of shares this node generated itself
        my_shares = set()
        # nonce -> time the work was handed out (used to report share age)
        times = {}
        
        # Handle solved work returned by a miner: reassemble the block,
        # submit it to bitcoind if it meets the block target, and feed the
        # share into the share chain. Returns True when the share extends
        # the current best chain.
        def got_response(data):
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                    if hash_ <= block['header']['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Received invalid share from worker - %x/%x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        # JSON endpoint: pool attempts per second over the last <=720 shares
        def get_rate():
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
                return json.dumps(att_s)
            return json.dumps(None)
        
        # JSON endpoint: payout-weight fraction per payout address
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
            return json.dumps(res)
        
        # Minimal twisted.web resource that serves func() with a fixed MIME type.
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        if args.charts:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        # Wrapper around a mempool transaction: tracks its input value and
        # whether all of its parents have been seen in blocks, so compute()
        # can decide which transactions are safe to include.
        class Tx(object):
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined on Tx in this file -
                # confirm it exists elsewhere, otherwise this raises
                # AttributeError (currently unreachable: new_tx is disabled).
                x = self.is_good2()
                #print 'is_good:', x
                return x
        
        # Fetch a transaction announced on the bitcoin p2p network and add
        # it to the local tx_pool (currently disabled below).
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            try:
                assert isinstance(tx_hash, (int, long))
                #print 'REQUESTING', tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print 'GOT', tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                log.err(None, 'Error handling tx:')
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(set_real_work2)
        
        # Background loop: refresh work from bitcoind whenever a new block
        # is announced, or after a randomized ~20s interval.
        @defer.inlineCallbacks
        def work1_thread():
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
        
        # Background loop: re-run share-chain bookkeeping every ~20s.
        @defer.inlineCallbacks
        def work2_thread():
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
        counter = skiplists.CountsSkipList(tracker, run_identifier)
        
        # Status loop: every 3 seconds print pool hash rate, our recent
        # share fraction, and stale counts; in debug mode also show the
        # reactor's reader/writer file-descriptor counts.
        while True:
            yield deferral.sleep(3)
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 5:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        matching_in_chain = counter(current_work.value['best_share_hash'], height)
                        shares_in_chain = my_shares & matching_in_chain
                        stale_shares = my_shares - matching_in_chain
                        print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                            math.format(att_s),
                            height,
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            len(shares_in_chain) + len(stale_shares),
                            len(stale_shares),
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                        #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
                        #for k, v in weights.iteritems():
                        #    print k.encode('hex'), v/total_weight
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()
552
def run():
    """Parse command-line arguments, set up debug logging if requested,
    and start the Twisted reactor running main().

    In --debug mode, stdout/stderr are redirected through a timestamping
    tee into 'debug.log' next to the script, and SIGUSR1 (where available)
    reopens the log file to support external log rotation.
    """
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
        # File-like object that can close and reopen its underlying file on
        # demand (used for SIGUSR1-driven log rotation below).
        class ReopeningFile(object):
            def __init__(self, *open_args, **open_kwargs):
                self.open_args, self.open_kwargs = open_args, open_kwargs
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def reopen(self):
                self.inner_file.close()
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def write(self, data):
                self.inner_file.write(data)
            def flush(self):
                self.inner_file.flush()
        # File-like object that fans every write out to several outputs
        # (here: the real stderr and the debug log file).
        class TeePipe(object):
            def __init__(self, outputs):
                self.outputs = outputs
            def write(self, data):
                for output in self.outputs:
                    output.write(data)
            def flush(self):
                for output in self.outputs:
                    output.flush()
        # File-like object that buffers partial lines and prefixes each
        # complete line with a wall-clock timestamp before forwarding it.
        class TimestampingPipe(object):
            def __init__(self, inner_file):
                self.inner_file = inner_file
                self.buf = ''
                # softspace is required so Python 2's 'print' statement
                # treats this object as a proper file
                self.softspace = 0
            def write(self, data):
                buf = self.buf + data
                lines = buf.split('\n')
                for line in lines[:-1]:
                    self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                    self.inner_file.flush()
                self.buf = lines[-1]
            def flush(self):
                pass
        logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
        # route stdout, stderr and Twisted's log observer through the
        # timestamping tee so everything lands in debug.log too
        sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
        if hasattr(signal, "SIGUSR1"):
            def sigusr1(signum, frame):
                print '''Caught SIGUSR1, closing 'debug.log'...'''
                logfile.reopen()
                print '''...and reopened 'debug.log' after catching SIGUSR1.'''
            signal.signal(signal.SIGUSR1, sigusr1)
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    reactor.callWhenRunning(main, args)
    reactor.run()