easier to read hashes/targets
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16
17 from twisted.internet import defer, reactor
18 from twisted.web import server, resource
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
21
22 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
23 from util import db, expiring_dict, jsonrpc, variable, deferral, math
24 from . import p2p, worker_interface, skiplists
25 import p2pool.data as p2pool
26 import p2pool as p2pool_init
27
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch current work and block height from bitcoind over JSON-RPC.

    Both RPCs are started before either is waited on; a block could still
    arrive in between, which is why callers re-poll.
    """
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    try:
        work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
        height = yield height_df
    finally:
        # Swallow residual errors on whichever deferred was abandoned so
        # twisted doesn't log an unhandled failure.
        work_df.addErrback(lambda fail: None)
        height_df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
40
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over bitcoin-p2p (checkorder) for a payout script.

    Returns the script on success, None if the IP transaction was denied,
    and raises ValueError on any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
51
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: pay to bitcoind's 'p2pool' account address."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
56
@defer.inlineCallbacks
def main(args):
    """Connect to bitcoind, join the p2pool share chain, and serve work to miners.

    Runs forever inside the twisted reactor, periodically printing pool
    statistics; any fatal error stops the reactor.
    """
    try:
        if args.charts:
            from . import draw
        
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        temp_work, temp_height = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        # NOTE(review): argparse declares dest='address' (see run()); presumably
        # run() converts it to args.pubkey_hash before calling main() - confirm.
        if args.pubkey_hash is None:
            if my_script is None:
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        print 'Loading cached block headers...'
        ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
        print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
        print
        
        tracker = p2pool.OkayTracker(args.net)
        ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                # loaded from disk, so already shared with the network and stored
                contents.shared = True
                contents.stored = True
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        # persist every share/verification added from now on
        tracker.added.watch(ss.add_share)
        tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        # share_hash -> (last_request_time, count); used for getshares backoff
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            # Poll bitcoind; refresh current_work and, if the best block
            # changed, rethink the share chain too.
            work, height = yield getwork(bitcoind)
            changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                clock_offset=time.time() - work.timestamp,
            ))
            if changed:
                set_real_work2()
        
        def set_real_work2():
            # Recompute the best share and request any missing parent shares
            # the tracker says it desires.
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                # exponential backoff: skip hashes requested too recently
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        print
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            # Broadcast a share to every connected peer except its origin.
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            # Handler for shares received from the p2pool network (and for
            # our own worker-found shares, which call this with peer=None).
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                some_new = True
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                # a share that also meets the bitcoin target is a full block
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        def p2p_share_hashes(share_hashes, peer):
            # Handler for share-hash announcements: request unknown hashes,
            # with the same backoff scheme as set_real_work2.
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            # Handler for getshares requests from peers; cap total shares
            # returned at ~1000 regardless of how many hashes were asked for.
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            # 'host[:port]' -> (host, port), defaulting to the network port
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # hard-coded bootstrap peers, plus DNS-resolved ones below
        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            # walk back from the new best share, broadcasting until we hit
            # one that has already been shared
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            # Periodically (roughly every 2 minutes) re-establish the UPnP
            # port mapping; errors are only logged in debug mode.
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if not is_lan:
                        continue
                    pm = yield portmapper.get_port_mapper()
                    yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
                except:
                    if p2pool_init.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # random per-run tag embedded in coinbase nonces so our own shares
        # can be recognized when computing the stale rate below
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        def compute(state, payout_script):
            # Build a getwork BlockAttempt for a miner from the current state.
            # NOTE: closes over tx_pool and times, which are defined later in
            # main() but before any worker request can arrive.
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            extra_txs = []
            size = 0
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                # block subsidy halves every 210000 blocks; add fees of included txs
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'New base for worker. Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                # enforce median-time-past rule over the last 11 shares
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        my_shares = set()
        # share nonce -> time work was handed out (for age reporting)
        times = {}
        
        def got_response(data):
            # Handle a getwork submission from a miner; returns True iff the
            # submitted share builds on the current best share.
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                    if hash_ <= block['header']['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        def get_rate():
            # JSON endpoint: pool attempts/second over up to the last 720 shares
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
                return json.dumps(att_s)
            return json.dumps(None)
        
        def get_users():
            # JSON endpoint: payout weight fraction per contributor address
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            # Minimal twisted resource: serve func() with a fixed MIME type.
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        if args.charts:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        class Tx(object):
            # Wraps a mempool transaction and tracks whether all of its
            # inputs are confirmed (parents_all_in_blocks) plus its fee data.
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                # Asynchronously fetch each input's source tx; bail out (leaving
                # parents_all_in_blocks False) on any failure or unconfirmed parent.
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined in this class - verify
                # it exists elsewhere, otherwise this raises AttributeError.
                x = self.is_good2()
                #print 'is_good:', x
                return x
        
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            # Fetch an announced transaction from bitcoind and add it to tx_pool.
            try:
                assert isinstance(tx_hash, (int, long))
                #print 'REQUESTING', tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print 'GOT', tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                log.err(None, 'Error handling tx:')
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def work1_thread():
            # Re-poll bitcoind whenever a new block is announced, or after a
            # random (mean ~20s) timeout, whichever comes first.
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            # Periodically rethink the share chain (mean ~20s interval).
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
        # counts shares in the chain carrying our run_identifier nonce prefix
        counter = skiplists.CountsSkipList(tracker, run_identifier)
        
        # main status loop: print pool statistics every 3 seconds
        while True:
            yield deferral.sleep(3)
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        matching_in_chain = counter(current_work.value['best_share_hash'], height)
                        shares_in_chain = my_shares & matching_in_chain
                        stale_shares = my_shares - matching_in_chain
                        print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                            math.format(att_s),
                            height,
                            len(tracker.verified.shares),
                            len(tracker.shares),
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            len(shares_in_chain) + len(stale_shares),
                            len(stale_shares),
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                        #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
                        #for k, v in weights.iteritems():
                        #    print k.encode('hex'), v/total_weight
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()
580
581 def run():
582     parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
583     parser.add_argument('--version', action='version', version=p2pool_init.__version__)
584     parser.add_argument('--testnet',
585         help='use the testnet',
586         action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
587     parser.add_argument('--debug',
588         help='debugging mode',
589         action='store_const', const=True, default=False, dest='debug')
590     parser.add_argument('-a', '--address',
591         help='generate to this address (defaults to requesting one from bitcoind)',
592         type=str, action='store', default=None, dest='address')
593     parser.add_argument('--charts',
594         help='generate charts on the web interface (requires PIL and pygame)',
595         action='store_const', const=True, default=False, dest='charts')
596     
597     p2pool_group = parser.add_argument_group('p2pool interface')
598     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
599         help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
600         type=int, action='store', default=None, dest='p2pool_port')
601     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
602         help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
603         type=str, action='append', default=[], dest='p2pool_nodes')
604     parser.add_argument('-l', '--low-bandwidth',
605         help='trade lower bandwidth usage for higher latency (reduced efficiency)',
606         action='store_true', default=False, dest='low_bandwidth')
607     parser.add_argument('--disable-upnp',
608         help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
609         action='store_false', default=True, dest='upnp')
610     
611     worker_group = parser.add_argument_group('worker interface')
612     worker_group.add_argument('-w', '--worker-port', metavar='PORT',
613         help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
614         type=int, action='store', default=9332, dest='worker_port')
615     
    # options for the local bitcoind this node relies on: RPC for getwork,
    # p2p for submitting blocks and obtaining the pubkey to generate to
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    # default None is resolved to the network-specific port after parse_args
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # no option strings given, so these two are required positional arguments
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
633     
    args = parser.parse_args()
    
    if args.debug:
        # debug mode: raise the global verbosity flag and (below) tee all
        # console output into a timestamped debug.log
        p2pool_init.DEBUG = True
638         class ReopeningFile(object):
639             def __init__(self, *open_args, **open_kwargs):
640                 self.open_args, self.open_kwargs = open_args, open_kwargs
641                 self.inner_file = open(*self.open_args, **self.open_kwargs)
642             def reopen(self):
643                 self.inner_file.close()
644                 self.inner_file = open(*self.open_args, **self.open_kwargs)
645             def write(self, data):
646                 self.inner_file.write(data)
647             def flush(self):
648                 self.inner_file.flush()
649         class TeePipe(object):
650             def __init__(self, outputs):
651                 self.outputs = outputs
652             def write(self, data):
653                 for output in self.outputs:
654                     output.write(data)
655             def flush(self):
656                 for output in self.outputs:
657                     output.flush()
658         class TimestampingPipe(object):
659             def __init__(self, inner_file):
660                 self.inner_file = inner_file
661                 self.buf = ''
662                 self.softspace = 0
663             def write(self, data):
664                 buf = self.buf + data
665                 lines = buf.split('\n')
666                 for line in lines[:-1]:
667                     self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
668                     self.inner_file.flush()
669                 self.buf = lines[-1]
670             def flush(self):
671                 pass
        # debug.log lives next to the script (dirname of sys.argv[0]); 'w' truncates
        logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
        # mirror all program output (stdout, stderr and twisted's log observer)
        # to both the real stderr and debug.log, timestamping every line
        sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
        # where SIGUSR1 exists (not on all platforms), use it to re-open
        # debug.log so the file can be rotated externally
        if hasattr(signal, "SIGUSR1"):
            def sigusr1(signum, frame):
                print '''Caught SIGUSR1, closing 'debug.log'...'''
                logfile.reopen()
                print '''...and reopened 'debug.log' after catching SIGUSR1.'''
            signal.signal(signal.SIGUSR1, sigusr1)
680     
    # fill in port defaults that depend on the selected network
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    # convert the payout address (if given) to a pubkey hash up front so an
    # invalid address fails fast at startup
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        # None means an address will be requested from bitcoind instead
        # (per the --address help text above)
        args.pubkey_hash = None
    
    # hand off to main() once the reactor is running, then block in the event loop
    reactor.callWhenRunning(main, args)
    reactor.run()