added disk cache for block headers
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16
17 from twisted.internet import defer, reactor
18 from twisted.web import server, resource
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
21
22 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
23 from util import db, expiring_dict, jsonrpc, variable, deferral, math
24 from . import p2p, worker_interface, skiplists
25 import p2pool.data as p2pool
26 import p2pool as p2pool_init
27
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current getwork job and block height from bitcoind.

    Both RPCs are issued before either is waited on so they run
    concurrently. Returns (BlockAttempt, height) via the deferred.
    """
    # fire both requests up front; note that a new block could still
    # arrive between the two responses
    work_deferred = bitcoind.rpc_getwork()
    height_deferred = bitcoind.rpc_getblocknumber()
    try:
        work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_deferred))
        height = yield height_deferred
    finally:
        # attach no-op errbacks so a failure on the un-consumed deferred
        # doesn't surface as an "Unhandled error in Deferred"
        for d in (work_deferred, height_deferred):
            d.addErrback(lambda fail: None)
    defer.returnValue((work, height))
40
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over the bitcoin p2p protocol for a payout script.

    Sends a null checkorder; returns the script on 'success', None on
    'denied', and raises ValueError on anything else.
    """
    protocol = yield factory.getProtocol()
    response = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = response['reply']
    if reply == 'denied':
        defer.returnValue(None)
    if reply != 'success':
        raise ValueError('Unexpected reply: %r' % (response,))
    defer.returnValue(response['script'])
51
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: pay to the address of bitcoind's 'p2pool' account."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
56
@defer.inlineCallbacks
def main(args):
    """Start p2pool: connect to bitcoind, join the p2pool network, and serve miners.

    Runs forever inside the twisted reactor; any uncaught error stops the reactor.
    """
    try:
        # import lazily so PIL/pygame are only required when charts are enabled
        if args.charts:
            from . import draw
        
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        temp_work, temp_height = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        if args.pubkey_hash is None:
            # no -a/--address given: use the checkorder result, or fall back
            # to asking bitcoind for an account address
            if my_script is None:
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            # explicit address on the command line always wins
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        # disk-backed cache of bitcoin block headers (used for share validation)
        print 'Loading cached block headers...'
        ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
        print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
        print
        
        tracker = p2pool.OkayTracker(args.net)
        chains = expiring_dict.ExpiringDict(300)
        def get_chain(chain_id_data):
            # NOTE(review): `Chain` is not defined in this file's visible scope —
            # this helper looks vestigial; confirm before relying on it
            return chains.setdefault(chain_id_data, Chain(chain_id_data))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        # share_hash -> (last_request_time, request_count), expires after 300s
        requested = expiring_dict.ExpiringDict(300)
        @defer.inlineCallbacks
        def set_real_work1():
            """Poll bitcoind for fresh work and publish it into current_work/current_work2."""
            work, height = yield getwork(bitcoind)
            # a changed previous_block means a new bitcoin block arrived
            changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                # preserve the current best share; set_real_work2 recomputes it
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            # clock_offset lets later code convert local time to bitcoind's clock
            current_work2.set(dict(
                clock_offset=time.time() - work.timestamp,
            ))
            if changed:
                set_real_work2()
        
        def set_real_work2():
            """Re-evaluate the best share chain and request any missing parent shares."""
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            # `desired` is (suggesting_peer, missing_share_hash) pairs
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                # exponential backoff: don't re-request within 10 * 1.5**count seconds
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                # any peer that announced a head descending from this share may have it
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    # first attempt: prefer the peer that suggested the share
                    peer = peer2
                else:
                    # retries: usually pick a random candidate, sometimes fall back to peer2
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    # stop at known heads (and a few of their recent ancestors) to avoid re-downloading
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        
        # start time expressed in bitcoind's clock
        # NOTE(review): start_time is not referenced elsewhere in this file — confirm before removing
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            """Broadcast a share to every connected peer except ignore_peer, then mark it shared."""
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            """Handle shares received from the p2pool network (or from our own worker).

            Adds unseen shares to the tracker, submits any share that is also a
            valid bitcoin block to bitcoind, and re-evaluates the best chain.
            """
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                some_new = True
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                # a share whose pow also meets the bitcoin target is a full block
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            # remember that this peer knows about the first share's chain
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        def p2p_share_hashes(share_hashes, peer):
            """Handle share-hash announcements: request hashes we don't have, with backoff."""
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                # same exponential backoff scheme as set_real_work2
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
235         def p2p_get_shares(share_hashes, parents, stops, peer):
236             parents = min(parents, 1000//len(share_hashes))
237             stops = set(stops)
238             shares = []
239             for share_hash in share_hashes:
240                 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
241                     if share.hash in stops:
242                         break
243                     shares.append(share)
244             peer.send_shares(shares, full=True)
245         
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            """Parse 'addr' or 'addr:port' into an (ip, port) tuple, defaulting to the net's P2P port."""
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # hard-coded bootstrap peers...
        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        # ...plus DNS-resolved ones; resolution failures are logged but non-fatal
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            # known peer addresses persisted in a SQLite file next to the script
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        # wire the network callbacks defined above into the node
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            # broadcast every not-yet-shared share at the tip of the new best chain
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
296         @defer.inlineCallbacks
297         def upnp_thread():
298             while True:
299                 try:
300                     is_lan, lan_ip = yield ipdiscover.get_local_ip()
301                     if not is_lan:
302                         continue
303                     pm = yield portmapper.get_port_mapper()
304                     yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
305                 except:
306                     if p2pool_init.DEBUG:
307                         log.err(None, "UPnP error:")
308                 yield deferral.sleep(random.expovariate(1/120))
309         
        if args.upnp:
            upnp_thread()
         
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        # merkle_root -> transactions of work handed to miners, so returned
        # solutions can be matched back up; entries expire after 300s
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # random tag embedded in our generated work, used to recognize our own shares
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        def compute(state, payout_script):
            """Build a getwork job (BlockAttempt) for a miner from the current state.

            Selects pending transactions, builds the p2pool generation
            transaction, records the merkle_root -> transactions mapping for
            later matching in got_response, and returns the work to hash.
            Raises jsonrpc.Error while shares are still being downloaded on a
            PERSIST network.
            """
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            # greedily take transactions until the 500 kB size budget is hit
            extra_txs = []
            size = 0
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                # 50 BTC halved every 210000 blocks, plus fees of included txs
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # NOTE(review): dict was built with ExpiringDict(300), so ~300s, not the 1000s an older comment claimed
            
            # timestamp in bitcoind's clock, nudged forward if the median of the
            # last 11 shares' timestamps would otherwise exceed it
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            # share target (easier than the block target) taken from the coinbase
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            # remember when this job was handed out, keyed by its unique nonce
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        # hashes of shares we produced, and nonce -> hand-out time of jobs
        my_shares = set()
        times = {}
368         
        def got_response(data):
            """Process a solved getwork result submitted by a miner.

            Returns True if the solution was a live share (extends the current
            best chain), False otherwise. Never raises — errors are logged and
            reported as False.
            """
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                # submit to bitcoind when it's a real block (or always in DEBUG mode)
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                    if hash_ <= block['header']['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                # check against the (easier) share target from the coinbase
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Received invalid share from worker - %x/%x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                # feed the share through the same path as network shares
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
403         
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        def get_rate():
            """Return the pool's attempts-per-second over up to 720 shares, as JSON (null if no chain yet)."""
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
                return json.dumps(att_s)
            return json.dumps(None)
        
        def get_users():
            """Return each payout script's weight fraction over up to 720 shares, as a JSON object."""
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
            return json.dumps(res)
420         
421         class WebInterface(resource.Resource):
422             def __init__(self, func, mime_type):
423                 self.func, self.mime_type = func, mime_type
424             
425             def render_GET(self, request):
426                 request.setHeader('Content-Type', self.mime_type)
427                 return self.func()
428         
        # expose pool statistics (and optionally a chain image) over HTTP
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        if args.charts:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        # memoized bitcoind getrawtransaction lookups, cached for 100s
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        class Tx(object):
            """A pending transaction plus bookkeeping used to decide whether to mine it."""
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                # this tx's own hash plus every input's previous-output hash
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                # kicks off async parent lookups; flips parents_all_in_blocks later
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                """Look up each input's parent tx; set parents_all_in_blocks once all are confirmed.

                Aborts silently on any lookup failure or unconfirmed parent,
                also accumulating value_in along the way.
                """
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined in this class or anywhere
                # visible in this file — this would raise AttributeError if reached;
                # confirm where is_good2 is meant to come from
                x = self.is_good2()
                #print 'is_good:', x
                return x
480         
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            """Fetch an announced transaction from bitcoind's p2p connection and add it to tx_pool."""
            try:
                assert isinstance(tx_hash, (int, long))
                #print 'REQUESTING', tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print 'GOT', tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                log.err(None, 'Error handling tx:')
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            # a new bitcoin block: wake up the work1 polling loop immediately
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        # recompute the best chain whenever the header store learns new headers
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def work1_thread():
            """Poll bitcoind for work forever: every ~20s (mean) or as soon as a new block arrives."""
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                # wake on whichever fires first: new-block event or randomized timer
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            """Re-evaluate the best share chain forever, every ~20s on average."""
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
        # counts how many recent shares carry our run_identifier nonce tag
        counter = skiplists.CountsSkipList(tracker, run_identifier)
        
        # main status loop: print pool/our statistics every 3 seconds
        while True:
            yield deferral.sleep(3)
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        # shares of ours that made it into the chain vs. went stale
                        matching_in_chain = counter(current_work.value['best_share_hash'], height)
                        shares_in_chain = my_shares & matching_in_chain
                        stale_shares = my_shares - matching_in_chain
                        print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                            math.format(att_s),
                            height,
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            len(shares_in_chain) + len(stale_shares),
                            len(stale_shares),
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                        #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
                        #for k, v in weights.iteritems():
                        #    print k.encode('hex'), v/total_weight
            except:
                log.err()
    except:
        # any unhandled startup/runtime error is fatal: log it and stop the reactor
        log.err(None, 'Fatal error:')
        reactor.stop()
555
def run():
    """Parse command-line arguments, set up debug logging, and start main() in the reactor."""
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # positional arguments: RPC credentials
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
        # in debug mode, tee timestamped output to both stderr and debug.log,
        # with SIGUSR1 reopening the log file (for logrotate)
        class ReopeningFile(object):
            # file-like wrapper that can close and reopen its underlying file on demand
            def __init__(self, *open_args, **open_kwargs):
                self.open_args, self.open_kwargs = open_args, open_kwargs
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def reopen(self):
                self.inner_file.close()
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def write(self, data):
                self.inner_file.write(data)
            def flush(self):
                self.inner_file.flush()
        class TeePipe(object):
            # file-like object that duplicates writes to several outputs
            def __init__(self, outputs):
                self.outputs = outputs
            def write(self, data):
                for output in self.outputs:
                    output.write(data)
            def flush(self):
                for output in self.outputs:
                    output.flush()
        class TimestampingPipe(object):
            # file-like wrapper that prefixes each complete line with a HH:MM:SS.us timestamp
            def __init__(self, inner_file):
                self.inner_file = inner_file
                self.buf = ''
                # softspace is needed so `print` statements behave on this pseudo-file
                self.softspace = 0
            def write(self, data):
                buf = self.buf + data
                lines = buf.split('\n')
                for line in lines[:-1]:
                    self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                    self.inner_file.flush()
                # keep any trailing partial line until its newline arrives
                self.buf = lines[-1]
            def flush(self):
                pass
        logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
        sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
        if hasattr(signal, "SIGUSR1"):
            def sigusr1(signum, frame):
                print '''Caught SIGUSR1, closing 'debug.log'...'''
                logfile.reopen()
                print '''...and reopened 'debug.log' after catching SIGUSR1.'''
            signal.signal(signal.SIGUSR1, sigusr1)
    
    # fill in network-dependent port defaults not given on the command line
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    reactor.callWhenRunning(main, args)
    reactor.run()