Merge branch 'unstable' of git://github.com/forrestv/p2pool into user_messages
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
28
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch current getwork data and block height from bitcoind over JSON-RPC.

    Both RPC calls are started concurrently; a block could arrive in between
    them, so the returned (work, height) pair may be momentarily inconsistent.
    Returns a Deferred firing with (BlockAttempt, height).
    """
    work_deferred = bitcoind.rpc_getwork()
    height_deferred = bitcoind.rpc_getblocknumber()
    try:
        raw_work = yield work_deferred
        work = bitcoin.getwork.BlockAttempt.from_getwork(raw_work)
        height = yield height_deferred
    finally:
        # attach no-op errbacks so a failure on either Deferred doesn't
        # produce an "unhandled error in Deferred" warning
        work_deferred.addErrback(lambda fail: None)
        height_deferred.addErrback(lambda fail: None)
    defer.returnValue((work, height))
41
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Request a payout script from bitcoind via a p2p checkorder message.

    Returns the script on 'success', None when the IP transaction is
    'denied', and raises ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    response = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = response['reply']
    if reply == 'denied':
        defer.returnValue(None)
    if reply != 'success':
        raise ValueError('Unexpected reply: %r' % (response,))
    defer.returnValue(response['script'])
52
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: derive one from bitcoind's 'p2pool' account address."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
57
@defer.inlineCallbacks
def main(args):
    """Run the p2pool node: connect to bitcoind, join the p2pool p2p network,
    serve work to miners over JSON-RPC, and loop printing pool status.

    Runs forever under the Twisted reactor; on any fatal error it logs and
    stops the reactor.
    """
    try:
        if args.charts:
            from . import draw
        
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        temp_work, temp_height = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        # NOTE(review): args.pubkey_hash does not appear to be defined by the
        # visible argument parser (run() only defines --address/args.address) —
        # confirm this attribute exists or this line raises AttributeError.
        if args.pubkey_hash is None:
            if my_script is None:
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        print 'Loading cached block headers...'
        ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
        print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
        print
        
        # load previously-seen shares and verified-share hashes from disk
        tracker = p2pool.OkayTracker(args.net)
        ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                contents.shared = True
                contents.stored = True
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        # persist newly-added shares/verifications back to the share store
        tracker.added.watch(ss.add_share)
        tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        # share_hash -> (last_request_time, request_count), for backoff on re-requests
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            # refresh current_work from bitcoind; on a new previous_block,
            # recompute the best share chain via set_real_work2()
            work, height = yield getwork(bitcoind)
            changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                clock_offset=time.time() - work.timestamp,
            ))
            if changed:
                set_real_work2()
        
        def set_real_work2():
            # pick the best share chain and request any missing parent shares
            # from peers, with exponential backoff per share hash
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    # usually ask a random peer that knows this chain; sometimes
                    # fall back to the originating peer
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        print
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            # broadcast a share to all connected peers except ignore_peer
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            # handle shares received from the p2p network (or from our own
            # worker via got_response); add new ones to the tracker and submit
            # any that qualify as full bitcoin blocks
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                some_new = True
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        def p2p_share_hashes(share_hashes, peer):
            # a peer announced share hashes; request the ones we don't have,
            # honoring the same per-hash backoff as set_real_work2
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            # serve a peer's getshares request; cap total shares sent at ~1000
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            # parse 'host' or 'host:port' into an (ip, port) tuple
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # hard-coded bootstrap nodes, plus DNS-resolved ones below
        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            # periodically (re-)establish a UPnP port mapping for the p2p port
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if not is_lan:
                        continue
                    pm = yield portmapper.get_port_mapper()
                    yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
                except:
                    if p2pool_init.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        # merkle_root -> transactions list, so submitted work can be matched
        # back to the transactions it committed to
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # random per-run tag embedded in coinbase nonces to identify our shares
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        def compute(state, payout_script):
            """Build a getwork BlockAttempt for a miner from the current state."""
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            extra_txs = []
            size = 0
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                # block subsidy (halving every 210000 blocks) plus fees
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'New base for worker. Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            # clamp timestamp to exceed the median of the last 11 shares
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        my_shares = set()
        times = {}  # coinbase nonce -> time work was handed out (for share age)
        
        def got_response(data):
            """Handle a miner's submitted work; returns True iff it became a good share."""
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                    if hash_ <= block['header']['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        def get_rate():
            # JSON handler: current pool attempts/second (or null before sync)
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
                return json.dumps(att_s)
            return json.dumps(None)
        
        def get_users():
            # JSON handler: payout-script -> fractional weight over recent shares
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            # tiny twisted.web resource wrapping a zero-argument callable
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        if args.charts:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        class Tx(object):
            # wraps a mempool transaction, lazily checking whether all of its
            # inputs come from transactions already included in blocks
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                # fire-and-forget: accumulates value_in and sets
                # parents_all_in_blocks once every input is confirmed
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined anywhere in this file —
                # confirm it exists, otherwise this raises AttributeError.
                x = self.is_good2()
                #print 'is_good:', x
                return x
        
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            # fetch an announced transaction from bitcoind and pool it
            try:
                assert isinstance(tx_hash, (int, long))
                #print 'REQUESTING', tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print 'GOT', tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                log.err(None, 'Error handling tx:')
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            # a new bitcoin block arrived: wake up the work1 refresh loop
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def work1_thread():
            # refresh bitcoind work whenever a new block arrives, or on a
            # randomized ~20s timer, whichever comes first
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            # periodically re-evaluate the best share chain
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
        
        def watchdog_handler(signum, frame):
            # SIGALRM fires if the reactor stalls for 30s (alarm reset each second below)
            print "Watchdog timer went off at:"
            # NOTE(review): print_exc() prints the most recent *exception*, not
            # the current stack; traceback.print_stack(frame) looks intended here.
            traceback.print_exc()
        
        signal.signal(signal.SIGALRM, watchdog_handler)
        task.LoopingCall(signal.alarm, 30).start(1)
        
        
        counter = skiplists.CountsSkipList(tracker, run_identifier)
        
        # main status loop: print pool/share statistics every ~3 seconds
        while True:
            yield deferral.sleep(3)
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        matching_in_chain = counter(current_work.value['best_share_hash'], height)
                        shares_in_chain = my_shares & matching_in_chain
                        stale_shares = my_shares - matching_in_chain
                        print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                            math.format(att_s),
                            height,
                            len(tracker.verified.shares),
                            len(tracker.shares),
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            len(shares_in_chain) + len(stale_shares),
                            len(stale_shares),
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                        #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
                        #for k, v in weights.iteritems():
                        #    print k.encode('hex'), v/total_weight
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()
590
591 def run():
592     parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
593     parser.add_argument('--version', action='version', version=p2pool_init.__version__)
594     parser.add_argument('--testnet',
595         help='use the testnet',
596         action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
597     parser.add_argument('--debug',
598         help='debugging mode',
599         action='store_const', const=True, default=False, dest='debug')
600     parser.add_argument('-a', '--address',
601         help='generate to this address (defaults to requesting one from bitcoind)',
602         type=str, action='store', default=None, dest='address')
603     parser.add_argument('--charts',
604         help='generate charts on the web interface (requires PIL and pygame)',
605         action='store_const', const=True, default=False, dest='charts')
606     
607     p2pool_group = parser.add_argument_group('p2pool interface')
608     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
609         help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
610         type=int, action='store', default=None, dest='p2pool_port')
611     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
612         help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
613         type=str, action='append', default=[], dest='p2pool_nodes')
614     parser.add_argument('-l', '--low-bandwidth',
615         help='trade lower bandwidth usage for higher latency (reduced efficiency)',
616         action='store_true', default=False, dest='low_bandwidth')
617     parser.add_argument('--disable-upnp',
618         help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
619         action='store_false', default=True, dest='upnp')
620     
621     worker_group = parser.add_argument_group('worker interface')
622     worker_group.add_argument('-w', '--worker-port', metavar='PORT',
623         help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
624         type=int, action='store', default=9332, dest='worker_port')
625     
626     bitcoind_group = parser.add_argument_group('bitcoind interface')
627     bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
628         help='connect to a bitcoind at this address (default: 127.0.0.1)',
629         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
630     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
631         help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
632         type=int, action='store', default=8332, dest='bitcoind_rpc_port')
633     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
634         help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
635         type=int, action='store', default=None, dest='bitcoind_p2p_port')
636     
637     bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
638         help='bitcoind RPC interface username',
639         type=str, action='store', dest='bitcoind_rpc_username')
640     bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
641         help='bitcoind RPC interface password',
642         type=str, action='store', dest='bitcoind_rpc_password')
643     
644     args = parser.parse_args()
645     
    # Debug mode: flip the package-level DEBUG switch and (below) tee all
    # output to a timestamped, SIGUSR1-rotatable 'debug.log' file.
    if args.debug:
        p2pool_init.DEBUG = True
648         class ReopeningFile(object):
649             def __init__(self, *open_args, **open_kwargs):
650                 self.open_args, self.open_kwargs = open_args, open_kwargs
651                 self.inner_file = open(*self.open_args, **self.open_kwargs)
652             def reopen(self):
653                 self.inner_file.close()
654                 self.inner_file = open(*self.open_args, **self.open_kwargs)
655             def write(self, data):
656                 self.inner_file.write(data)
657             def flush(self):
658                 self.inner_file.flush()
659         class TeePipe(object):
660             def __init__(self, outputs):
661                 self.outputs = outputs
662             def write(self, data):
663                 for output in self.outputs:
664                     output.write(data)
665             def flush(self):
666                 for output in self.outputs:
667                     output.flush()
668         class TimestampingPipe(object):
669             def __init__(self, inner_file):
670                 self.inner_file = inner_file
671                 self.buf = ''
672                 self.softspace = 0
673             def write(self, data):
674                 buf = self.buf + data
675                 lines = buf.split('\n')
676                 for line in lines[:-1]:
677                     self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
678                     self.inner_file.flush()
679                 self.buf = lines[-1]
680             def flush(self):
681                 pass
        # Tee all program output (stdout, stderr, and Twisted log messages)
        # to both the original stderr and a timestamped 'debug.log' located
        # next to the executable.
        logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
        sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
        # Support external log rotation: on SIGUSR1, close and reopen
        # debug.log. Guarded by hasattr because SIGUSR1 does not exist on
        # every platform (e.g. Windows).
        if hasattr(signal, "SIGUSR1"):
            def sigusr1(signum, frame):
                print '''Caught SIGUSR1, closing 'debug.log'...'''
                logfile.reopen()
                print '''...and reopened 'debug.log' after catching SIGUSR1.'''
            signal.signal(signal.SIGUSR1, sigusr1)
690     
    # Fill in per-network port defaults for anything the user didn't specify
    # (mainnet and testnet use different default ports).
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.address is not None:
        # Validate the payout address up front so a typo fails fast, before
        # the reactor starts and any network activity happens.
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        # No address supplied; per the -a/--address help text, one will be
        # requested from bitcoind instead.
        args.pubkey_hash = None
    
    # Hand off to main() once the Twisted reactor is running; reactor.run()
    # blocks until shutdown.
    reactor.callWhenRunning(main, args)
    reactor.run()