# p2pool/main.py (from p2pool.git) — indentation and imports cleaned up
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16
17 from twisted.internet import defer, reactor
18 from twisted.web import server, resource
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
21
22 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
23 from util import db, expiring_dict, jsonrpc, variable, deferral, math
24 from . import p2p, worker_interface, skiplists
25 import p2pool.data as p2pool
26 import p2pool as p2pool_init
27
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current getwork block attempt and block height from bitcoind.

    Both RPC calls are issued concurrently; note that a new block could
    arrive in between them, so the pair is not guaranteed to be consistent.
    Returns a (BlockAttempt, height) tuple via the deferred.
    """
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    try:
        work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
        height = yield height_df
    finally:
        # silence any residual errors so Twisted doesn't report them
        # as unhandled deferred failures
        for df in (work_df, height_df):
            df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
40
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind, via a p2p checkorder message, for a payout script.

    Returns the script on success, None if the IP transaction was denied,
    and raises ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'denied':
        defer.returnValue(None)
    if reply == 'success':
        defer.returnValue(res['script'])
    raise ValueError('Unexpected reply: %r' % (res,))
51
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: derive it from bitcoind's 'p2pool' account address."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
56
@defer.inlineCallbacks
def main(args):
    """Run the p2pool node.

    Connects to bitcoind over JSON-RPC (for work) and p2p (for block
    submission and payout-script discovery), joins the p2pool share chain
    network, serves work to miners over a JSON-RPC worker interface, and
    loops forever printing pool status.  Any fatal error stops the reactor.
    """
    try:
        if args.charts:
            from . import draw
        
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        temp_work, temp_height = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        if args.pubkey_hash is None:
            if my_script is None:
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            # explicit -a/--address always wins over checkorder result
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        print 'Loading cached block headers...'
        ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
        print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
        print
        
        tracker = p2pool.OkayTracker(args.net)
        chains = expiring_dict.ExpiringDict(300)
        # NOTE(review): `Chain` is not defined anywhere in this module, so
        # calling this would raise NameError — appears to be dead code.
        def get_chain(chain_id_data):
            return chains.setdefault(chain_id_data, Chain(chain_id_data))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        # fired whenever bitcoind announces a new block (see new_block below)
        work_updated = variable.Event()
        
        # share_hash -> (last_request_time, request_count); rate-limits getshares
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            # Refresh current_work/current_work2 from bitcoind; chains into
            # set_real_work2 when the best bitcoin block changed.
            work, height = yield getwork(bitcoind)
            changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                clock_offset=time.time() - work.timestamp,
            ))
            if changed:
                set_real_work2()
        
        def set_real_work2():
            # Recompute the best share from the tracker and request any
            # missing ancestors ("desired" shares) from peers.
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                # exponential backoff on repeated requests for the same share
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    # after the first failed attempt, usually try a random peer instead
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        print
        
        # node start time in bitcoind's clock frame (unused below; kept for reference)
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            # Broadcast a share to all peers except the one it came from.
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            # Handle shares received from the network (or locally mined ones,
            # see got_response): add new ones to the tracker, submit any that
            # solve a bitcoin block, and re-evaluate the best chain.
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                some_new = True
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                if share.bitcoin_hash <= share.header['target']:
                    # this share also satisfies the bitcoin target - it's a block!
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        def p2p_share_hashes(share_hashes, peer):
            # Handle advertised share hashes: request the ones we don't have,
            # with the same backoff scheme as set_real_work2.
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            # Serve a peer's getshares request, capping total shares at ~1000.
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            # "host[:port]" -> (host, port), defaulting to the network's p2p port
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # hard-coded bootstrap node IPs
        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        # DNS-based bootstrap nodes; resolution failure is non-fatal
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            # broadcast every not-yet-shared share on the new best chain
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            # Periodically (re-)register a UPnP port mapping for the p2p port.
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if not is_lan:
                        continue
                    pm = yield portmapper.get_port_mapper()
                    yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
                except:
                    if p2pool_init.DEBUG:
                        log.err(None, "UPnP error:")
                # retry after a randomized delay averaging 120 seconds
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        # merkle_root -> transaction list, so submitted work can be matched
        # back to the block we handed out
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # random per-run tag embedded in coinbase nonces to identify our own shares
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        def compute(state, payout_script):
            # Build a getwork BlockAttempt for a miner. `state` is a
            # current_work snapshot; `payout_script` may be None (use ours).
            # NOTE(review): relies on tx_pool and times, which are assigned
            # later in main() - safe only because compute runs after startup.
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            extra_txs = []
            size = 0
            # greedily include transactions up to ~500kB of block space
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                # 50 BTC halved every 210000 blocks, plus fees of included txs
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            # timestamp must exceed the median of the last 11 shares' timestamps
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            # record issue time by coinbase nonce, for share-age reporting
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        my_shares = set()   # hashes of shares we mined this run
        times = {}          # coinbase nonce -> time work was handed out
        
        def got_response(data):
            # Handle a getwork solution submitted by a miner. Returns True
            # iff it was accepted as a (probably-)good share.
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                    if hash_ <= block['header']['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                # share-level difficulty check
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Received invalid share from worker - %x/%x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        def get_rate():
            # JSON pool hashrate over the last (up to) 720 shares
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
                return json.dumps(att_s)
            return json.dumps(None)
        
        def get_users():
            # JSON map of payout address -> fraction of current chain weight
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            # Tiny twisted.web resource that serves func() with a fixed MIME type.
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        if args.charts:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        class Tx(object):
            # Wrapper for a mempool transaction with fee/confirmation bookkeeping.
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                # this tx's hash plus the hashes of all txs it spends from
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                # Asynchronously check that every input's parent tx is confirmed,
                # accumulating value_in along the way. Bails out on any failure.
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined on this class - this
                # would raise AttributeError if reached. Currently unreachable
                # in practice since new_tx (the only tx_pool filler) is disabled.
                x = self.is_good2()
                #print 'is_good:', x
                return x
        
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            # Fetch an announced tx from bitcoind and add it to the mempool.
            try:
                assert isinstance(tx_hash, (int, long))
                #print 'REQUESTING', tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print 'GOT', tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                log.err(None, 'Error handling tx:')
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            # bitcoind announced a block: wake up work1_thread immediately
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def work1_thread():
            # Poll bitcoind for work every ~20s, or sooner when a new block
            # is announced (work_updated).
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            # Periodically re-evaluate the best share chain (~every 20s).
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
        # counts our run_identifier-tagged shares along the chain
        counter = skiplists.CountsSkipList(tracker, run_identifier)
        
        # status-reporting loop: print pool/local stats every 3 seconds
        while True:
            yield deferral.sleep(3)
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        matching_in_chain = counter(current_work.value['best_share_hash'], height)
                        shares_in_chain = my_shares & matching_in_chain
                        stale_shares = my_shares - matching_in_chain
                        print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                            math.format(att_s),
                            height,
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            len(shares_in_chain) + len(stale_shares),
                            len(stale_shares),
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                        #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
                        #for k, v in weights.iteritems():
                        #    print k.encode('hex'), v/total_weight
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()
556
def run():
    """Parse command-line arguments, set up debug logging if requested,
    apply network-dependent defaults, and start the reactor running main().
    """
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    # NOTE(review): these two are added to `parser`, not `p2pool_group` -
    # probably an oversight, but purely cosmetic in --help output.
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # positional arguments: RPC credentials are required
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
        class ReopeningFile(object):
            # File-like wrapper whose underlying file can be closed and
            # reopened (used for log rotation via SIGUSR1).
            def __init__(self, *open_args, **open_kwargs):
                self.open_args, self.open_kwargs = open_args, open_kwargs
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def reopen(self):
                self.inner_file.close()
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def write(self, data):
                self.inner_file.write(data)
            def flush(self):
                self.inner_file.flush()
        class TeePipe(object):
            # File-like object that duplicates writes to several outputs.
            def __init__(self, outputs):
                self.outputs = outputs
            def write(self, data):
                for output in self.outputs:
                    output.write(data)
            def flush(self):
                for output in self.outputs:
                    output.flush()
        class TimestampingPipe(object):
            # File-like wrapper that prefixes each complete line with a
            # HH:MM:SS.ffffff timestamp; partial lines are buffered.
            def __init__(self, inner_file):
                self.inner_file = inner_file
                self.buf = ''
                self.softspace = 0  # required by Python 2's print machinery
            def write(self, data):
                buf = self.buf + data
                lines = buf.split('\n')
                for line in lines[:-1]:
                    self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                    self.inner_file.flush()
                self.buf = lines[-1]
            def flush(self):
                pass
        # mirror stdout/stderr (and Twisted's log) to a timestamped debug.log
        logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
        sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
        if hasattr(signal, "SIGUSR1"):
            # SIGUSR1 reopens debug.log so external tools can rotate it
            def sigusr1(signum, frame):
                print '''Caught SIGUSR1, closing 'debug.log'...'''
                logfile.reopen()
                print '''...and reopened 'debug.log' after catching SIGUSR1.'''
            signal.signal(signal.SIGUSR1, sigusr1)
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    reactor.callWhenRunning(main, args)
    reactor.run()
672     reactor.run()