steady status line interval
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15
16 from twisted.internet import defer, reactor
17 from twisted.web import server, resource
18 from twisted.python import log
19 from nattraverso import portmapper, ipdiscover
20
21 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
22 from util import db, expiring_dict, jsonrpc, variable, deferral, math
23 from . import p2p, worker_interface, skiplists
24 import p2pool.data as p2pool
25 import p2pool as p2pool_init
26
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch current work and block height from bitcoind concurrently.
    
    Returns a deferred firing with (BlockAttempt, height). Retries up to
    3 times on failure via deferral.retry.
    """
    # Fire both RPCs at once; a block could still arrive in between them.
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    try:
        work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
        height = yield height_df
    finally:
        # Swallow residual errors on both deferreds so an early failure of
        # one doesn't leave the other reported as an unhandled error.
        for df in (work_df, height_df):
            df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
39
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind (over bitcoin-p2p checkorder) for a payout script.
    
    Returns the script on success, None if the IP transaction was denied,
    and raises ValueError on any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
50
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build a payout script from bitcoind's 'p2pool' account address."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
55
@defer.inlineCallbacks
def main(args):
    """Run a p2pool node until the reactor stops.
    
    Connects to bitcoind over JSON-RPC and bitcoin-p2p, joins the p2pool
    share-chain network, serves getwork to miners over a local JSON-RPC
    worker interface, and then loops forever printing a status line.
    
    args is the argparse namespace produced by run(). Any uncaught error
    is logged and stops the reactor.
    """
    try:
        if args.charts:
            # Imported lazily so PIL/pygame are only required with --charts.
            from . import draw
        
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        temp_work, temp_height = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        if args.pubkey_hash is None:
            if my_script is None:
                # checkorder denied: fall back to an address from bitcoind's wallet
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            # -a/--address was given on the command line; it overrides checkorder
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        # Tracks bitcoin block heights via the p2p connection.
        ht = bitcoin.p2p.HeightTracker(factory)
        
        # Share-chain tracker; holds all known shares and picks the best chain.
        tracker = p2pool.OkayTracker(args.net)
        chains = expiring_dict.ExpiringDict(300)
        def get_chain(chain_id_data):
            # NOTE(review): 'Chain' is not defined anywhere in this module and
            # get_chain is never called from visible code — appears to be dead
            # code left from an earlier design; calling it would raise NameError.
            return chains.setdefault(chain_id_data, Chain(chain_id_data))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        # Events fired when bitcoind work / the share tracker change; they wake
        # the work1_thread/work2_thread refresh loops below.
        work_updated = variable.Event()
        tracker_updated = variable.Event()
        
        # share_hash -> (last_request_time, request_count), for request throttling
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            # Refresh bitcoind-derived work (new block template) into current_work,
            # preserving the previously chosen best_share_hash.
            work, height = yield getwork(bitcoind)
            # XXX call tracker_updated
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                # offset between local clock and bitcoind's work timestamp
                clock_offset=time.time() - work.timestamp,
            ))
        
        def set_real_work2():
            # Re-run chain selection and request any shares the tracker says it
            # wants ("desired") from peers believed to know about them.
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                #if share_hash not in tracker.tails: # was received in the time tracker.think was running
                #    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                # Exponential backoff: skip if we asked recently for this hash.
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    # first attempt: prefer the peer that announced the share
                    peer = peer2
                else:
                    # retries: usually pick a random peer that knows a head above it
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    # stop at hashes near our current heads so the peer doesn't
                    # resend shares we already have (capped at 100 stops)
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            # Broadcast a share to all connected peers except the one it came from.
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            # Handle shares received from the network (or from our own miner via
            # got_response): add new ones to the tracker, submit any that solve a
            # bitcoin block, and fire tracker_updated if anything was new.
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                some_new = True
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                # A share whose bitcoin-level hash meets the block target is a
                # full bitcoin block — submit it to bitcoind.
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            if shares and peer is not None:
                # remember that this peer knows about this chain head
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                tracker_updated.happened()
            
            if len(shares) > 5:
                print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        def p2p_share_hashes(share_hashes, peer):
            # Handle share-hash announcements: request unknown hashes from the
            # announcing peer, with the same backoff throttling as set_real_work2.
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            # Serve a peer's getshares request: walk each requested chain up to
            # 'parents' ancestors (bounded so at most ~1000 shares total), stopping
            # early at any hash the peer said it already has.
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            # Parse 'host[:port]' into (host, port), defaulting to the net's port.
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # hard-coded bootstrap node IPs
        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        # DNS-resolved bootstrap nodes; failures are logged and ignored
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            # persistent peer-address store next to the script
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        # wire up the network callbacks defined above
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            # Broadcast our not-yet-shared shares on the newly-best chain,
            # stopping at the first one already flagged as shared.
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            # Periodically (exponential ~120s mean) re-establish the UPnP port
            # mapping; all errors are logged and the loop continues.
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if not is_lan:
                        continue
                    pm = yield portmapper.get_port_mapper()
                    yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
         
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        # merkle_root -> transaction list for work we handed out (expires ~300s,
        # despite the '1000 seconds' comment further down)
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # random tag embedded in our generated coinbases so we can recognize
        # our own shares in the chain (see CountsSkipList below)
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        def compute(state, payout_script):
            # Build a getwork BlockAttempt for a miner: select mempool
            # transactions, create the p2pool generate (coinbase) transaction,
            # and remember the merkle root so got_response can match it up.
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            extra_txs = []
            size = 0
            # greedily take transactions up to ~500kB of serialized size
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                # block subsidy (halving every 210000 blocks) plus fees
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            # Use bitcoind-relative time, clamped to be after the median of the
            # last 11 share timestamps (same rule as bitcoin's median-time-past).
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            # record send time keyed by coinbase nonce, for the share-age print
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        my_shares = set()   # hashes of shares our own miners produced
        times = {}          # coinbase nonce -> time work was handed out
        
        def got_response(data):
            # Handle a miner's submitted getwork solution. Returns True if it
            # was a good share, False otherwise (errors are logged, not raised).
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                # NOTE(review): 'or p2pool_init.DEBUG' means debug mode submits
                # every solution to bitcoind as if it were a block
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Received invalid share from worker - %x/%x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        def get_rate():
            # JSON endpoint: pool hash rate estimated over up to 720 shares.
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
                return json.dumps(att_s)
            return json.dumps(None)
        
        def get_users():
            # JSON endpoint: payout-script -> fraction of recent pool weight.
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[script.encode('hex')] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            # Minimal twisted.web resource that serves func() with a MIME type.
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        if args.charts:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        # mempool transactions we may include in generated work
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        class Tx(object):
            # Wraps a mempool transaction, lazily checking (via bitcoind RPC)
            # whether all of its inputs are already confirmed in blocks.
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                # Accumulate value_in and set parents_all_in_blocks only if every
                # input's parent transaction is confirmed; bails out on any error.
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined on this class or in this
                # module — this raises AttributeError if reached. Harmless only
                # while the new_tx watcher below stays disabled.
                x = self.is_good2()
                #print 'is_good:', x
                return x
        
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            # Fetch an announced transaction over bitcoin-p2p into tx_pool.
            try:
                assert isinstance(tx_hash, (int, long))
                #print 'REQUESTING', tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print 'GOT', tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                log.err(None, 'Error handling tx:')
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            # A new bitcoin block arrived: wake work1_thread to refresh work.
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(lambda x: set_real_work2())
        
        @defer.inlineCallbacks
        def work1_thread():
            # Refresh bitcoind work whenever work_updated fires, or on an
            # exponential ~1s-mean timer, whichever comes first.
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/1))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            # Re-run chain selection whenever tracker_updated fires, or on the
            # same ~1s-mean timer.
            while True:
                flag = tracker_updated.get_deferred()
                try:
                    set_real_work2()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/1))], fireOnOneCallback=True)
        
        work1_thread()
        work2_thread()
        
        # counts our own shares (tagged with run_identifier) in the chain
        counter = skiplists.CountsSkipList(tracker, run_identifier)
        
        # status loop: print a one-line pool/miner summary every ~3 seconds
        while True:
            yield deferral.sleep(3)
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 5:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        matching_in_chain = counter(current_work.value['best_share_hash'], height)
                        shares_in_chain = my_shares & matching_in_chain
                        stale_shares = my_shares - matching_in_chain
                        print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                            math.format(att_s),
                            height,
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            len(shares_in_chain) + len(stale_shares),
                            len(stale_shares),
                            len(p2p_node.peers),
                        )
                        #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
                        #for k, v in weights.iteritems():
                        #    print k.encode('hex'), v/total_weight
            except:
                log.err()
    except:
        # any startup or steady-state failure is fatal: log it and shut down
        log.err(None, 'Fatal error:')
        reactor.stop()
549
def run():
    """Parse command-line arguments, apply defaults, and start the reactor.
    
    Builds the argparse CLI (p2pool, worker, and bitcoind option groups),
    optionally installs a timestamping tee of stdout/stderr to debug.log
    when --debug is given, then schedules main(args) and runs the Twisted
    reactor until it stops.
    """
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # positional arguments: username then password, in this order
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
        class TeePipe(object):
            # File-like object that fans every write out to several outputs.
            def __init__(self, outputs):
                self.outputs = outputs
            def write(self, data):
                for output in self.outputs:
                    output.write(data)
            def flush(self):
                for output in self.outputs:
                    output.flush()
        class TimestampingPipe(object):
            # File-like wrapper that prefixes each complete line with a
            # HH:MM:SS.ffffff timestamp, buffering partial lines in self.buf.
            def __init__(self, inner_file):
                self.inner_file = inner_file
                self.buf = ''
                # needed so 'print' statement softspace handling works on us
                self.softspace = 0
            def write(self, data):
                buf = self.buf + data
                lines = buf.split('\n')
                for line in lines[:-1]:
                    self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                    self.inner_file.flush()
                self.buf = lines[-1]
            def flush(self):
                # flushing is done per-line in write()
                pass
        # mirror all output (including twisted's log) to stderr and debug.log
        sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, open(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')]))
    
    # resolve port defaults that depend on the selected network
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        # None means: ask bitcoind for a payout script at startup (see main)
        args.pubkey_hash = None
    
    reactor.callWhenRunning(main, args)
    reactor.run()