early support for inclusion of stale counts in shares
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
28
29 @deferral.retry('Error getting work from bitcoind:', 3)
30 @defer.inlineCallbacks
31 def getwork(bitcoind):
32     # a block could arrive in between these two queries
33     getwork_df, height_df = bitcoind.rpc_getwork(), bitcoind.rpc_getblocknumber()
34     try:
35         getwork, height = bitcoin.getwork.BlockAttempt.from_getwork((yield getwork_df)), (yield height_df)
36     finally:
37         # get rid of residual errors
38         getwork_df.addErrback(lambda fail: None)
39         height_df.addErrback(lambda fail: None)
40     defer.returnValue((getwork, height))
41
42 @deferral.retry('Error getting payout script from bitcoind:', 1)
43 @defer.inlineCallbacks
44 def get_payout_script(factory):
45     res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
46     if res['reply'] == 'success':
47         defer.returnValue(res['script'])
48     elif res['reply'] == 'denied':
49         defer.returnValue(None)
50     else:
51         raise ValueError('Unexpected reply: %r' % (res,))
52
53 @deferral.retry('Error creating payout script:', 10)
54 @defer.inlineCallbacks
55 def get_payout_script2(bitcoind, net):
56     defer.returnValue(bitcoin.data.pubkey_hash_to_script2(bitcoin.data.address_to_pubkey_hash((yield bitcoind.rpc_getaccountaddress('p2pool')), net)))
57
58 @defer.inlineCallbacks
59 def main(args):
60     try:
61         if args.charts:
62             from . import draw
63         
64         print 'p2pool (version %s)' % (p2pool_init.__version__,)
65         print
66         
67         # connect to bitcoind over JSON-RPC and do initial getwork
68         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
69         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
70         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
71         good = yield args.net.BITCOIN_RPC_CHECK(bitcoind)
72         if not good:
73             print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
74             return
75         temp_work, temp_height = yield getwork(bitcoind)
76         print '    ...success!'
77         print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
78         print
79         
80         # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
81         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
82         factory = bitcoin.p2p.ClientFactory(args.net)
83         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
84         my_script = yield get_payout_script(factory)
85         if args.pubkey_hash is None:
86             if my_script is None:
87                 print '    IP transaction denied ... falling back to sending to address.'
88                 my_script = yield get_payout_script2(bitcoind, args.net)
89         else:
90             my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
91         print '    ...success!'
92         print '    Payout script:', my_script.encode('hex')
93         print
94         
95         print 'Loading cached block headers...'
96         ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
97         print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
98         print
99         
100         tracker = p2pool.OkayTracker(args.net)
101         ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
102         known_verified = set()
103         print "Loading shares..."
104         for i, (mode, contents) in enumerate(ss.get_shares()):
105             if mode == 'share':
106                 if contents.hash in tracker.shares:
107                     continue
108                 contents.shared = True
109                 contents.stored = True
110                 tracker.add(contents)
111                 if len(tracker.shares) % 1000 == 0 and tracker.shares:
112                     print "    %i" % (len(tracker.shares),)
113             elif mode == 'verified_hash':
114                 known_verified.add(contents)
115             else:
116                 raise AssertionError()
117         print "    ...inserting %i verified shares..." % (len(known_verified),)
118         for h in known_verified:
119             if h not in tracker.shares:
120                 continue
121             tracker.verified.add(tracker.shares[h])
122         print "    ...done loading %i shares!" % (len(tracker.shares),)
123         print
124         tracker.added.watch(ss.add_share)
125         tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
126         
127         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
128         
129         # information affecting work that should trigger a long-polling update
130         current_work = variable.Variable(None)
131         # information affecting work that should not trigger a long-polling update
132         current_work2 = variable.Variable(None)
133         
134         work_updated = variable.Event()
135         
136         requested = expiring_dict.ExpiringDict(300)
137         
138         @defer.inlineCallbacks
139         def set_real_work1():
140             work, height = yield getwork(bitcoind)
141             changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
142             current_work.set(dict(
143                 version=work.version,
144                 previous_block=work.previous_block,
145                 target=work.target,
146                 height=height,
147                 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
148             ))
149             current_work2.set(dict(
150                 clock_offset=time.time() - work.timestamp,
151             ))
152             if changed:
153                 set_real_work2()
154         
155         def set_real_work2():
156             best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
157             
158             t = dict(current_work.value)
159             t['best_share_hash'] = best
160             current_work.set(t)
161             
162             t = time.time()
163             for peer2, share_hash in desired:
164                 if share_hash not in tracker.tails: # was received in the time tracker.think was running
165                     continue
166                 last_request_time, count = requested.get(share_hash, (None, 0))
167                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
168                     continue
169                 potential_peers = set()
170                 for head in tracker.tails[share_hash]:
171                     potential_peers.update(peer_heads.get(head, set()))
172                 potential_peers = [peer for peer in potential_peers if peer.connected2]
173                 if count == 0 and peer2 is not None and peer2.connected2:
174                     peer = peer2
175                 else:
176                     peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
177                     if peer is None:
178                         continue
179                 
180                 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
181                 peer.send_getshares(
182                     hashes=[share_hash],
183                     parents=2000,
184                     stops=list(set(tracker.heads) | set(
185                         tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
186                     ))[:100],
187                 )
188                 requested[share_hash] = t, count + 1
189         
190         print 'Initializing work...'
191         yield set_real_work1()
192         set_real_work2()
193         print '    ...success!'
194         print
195         
196         start_time = time.time() - current_work2.value['clock_offset']
197         
198         # setup p2p logic and join p2pool network
199         
200         def share_share(share, ignore_peer=None):
201             for peer in p2p_node.peers.itervalues():
202                 if peer is ignore_peer:
203                     continue
204                 #if p2pool_init.DEBUG:
205                 #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
206                 peer.send_shares([share])
207             share.flag_shared()
208         
209         def p2p_shares(shares, peer=None):
210             if len(shares) > 5:
211                 print 'Processing %i shares...' % (len(shares),)
212             
213             some_new = False
214             for share in shares:
215                 if share.hash in tracker.shares:
216                     #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
217                     continue
218                 some_new = True
219                 
220                 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
221                 
222                 tracker.add(share)
223                 #for peer2, share_hash in desired:
224                 #    print 'Requesting parent share %x' % (share_hash,)
225                 #    peer2.send_getshares(hashes=[share_hash], parents=2000)
226                 
227                 if share.bitcoin_hash <= share.header['target']:
228                     print
229                     print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
230                     print
231                     if factory.conn.value is not None:
232                         factory.conn.value.send_block(block=share.as_block(tracker, args.net))
233                     else:
234                         print 'No bitcoind connection! Erp!'
235             
236             if shares and peer is not None:
237                 peer_heads.setdefault(shares[0].hash, set()).add(peer)
238             
239             if some_new:
240                 set_real_work2()
241             
242             if len(shares) > 5:
243                 print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
244         
245         def p2p_share_hashes(share_hashes, peer):
246             t = time.time()
247             get_hashes = []
248             for share_hash in share_hashes:
249                 if share_hash in tracker.shares:
250                     continue
251                 last_request_time, count = requested.get(share_hash, (None, 0))
252                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
253                     continue
254                 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
255                 get_hashes.append(share_hash)
256                 requested[share_hash] = t, count + 1
257             
258             if share_hashes and peer is not None:
259                 peer_heads.setdefault(share_hashes[0], set()).add(peer)
260             if get_hashes:
261                 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
262         
263         def p2p_get_shares(share_hashes, parents, stops, peer):
264             parents = min(parents, 1000//len(share_hashes))
265             stops = set(stops)
266             shares = []
267             for share_hash in share_hashes:
268                 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
269                     if share.hash in stops:
270                         break
271                     shares.append(share)
272             print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
273             peer.send_shares(shares, full=True)
274         
275         print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
276         
277         def parse(x):
278             if ':' in x:
279                 ip, port = x.split(':')
280                 return ip, int(port)
281             else:
282                 return x, args.net.P2P_PORT
283         
284         nodes = set([
285             ('72.14.191.28', args.net.P2P_PORT),
286             ('62.204.197.159', args.net.P2P_PORT),
287             ('142.58.248.28', args.net.P2P_PORT),
288             ('94.23.34.145', args.net.P2P_PORT),
289         ])
290         for host in [
291             'p2pool.forre.st',
292             'dabuttonfactory.com',
293         ]:
294             try:
295                 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
296             except:
297                 log.err(None, 'Error resolving bootstrap node IP:')
298         
299         p2p_node = p2p.Node(
300             current_work=current_work,
301             port=args.p2pool_port,
302             net=args.net,
303             addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
304             mode=0 if args.low_bandwidth else 1,
305             preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
306         )
307         p2p_node.handle_shares = p2p_shares
308         p2p_node.handle_share_hashes = p2p_share_hashes
309         p2p_node.handle_get_shares = p2p_get_shares
310         
311         p2p_node.start()
312         
313         # send share when the chain changes to their chain
314         def work_changed(new_work):
315             #print 'Work changed:', new_work
316             for share in tracker.get_chain_known(new_work['best_share_hash']):
317                 if share.shared:
318                     break
319                 share_share(share, share.peer)
320         current_work.changed.watch(work_changed)
321         
322         print '    ...success!'
323         print
324         
325         @defer.inlineCallbacks
326         def upnp_thread():
327             while True:
328                 try:
329                     is_lan, lan_ip = yield ipdiscover.get_local_ip()
330                     if is_lan:
331                         pm = yield portmapper.get_port_mapper()
332                         yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
333                 except:
334                     if p2pool_init.DEBUG:
335                         log.err(None, "UPnP error:")
336                 yield deferral.sleep(random.expovariate(1/120))
337         
338         if args.upnp:
339             upnp_thread()
340         
341         # start listening for workers with a JSON-RPC server
342         
343         print 'Listening for workers on port %i...' % (args.worker_port,)
344         
345         # setup worker logic
346         
347         merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
348         run_identifier = struct.pack('<Q', random.randrange(2**64))
349         
350         share_counter = skiplists.CountsSkipList(tracker, run_identifier)
351         def get_share_counts():
352             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
353             matching_in_chain = share_counter(current_work.value['best_share_hash'], height)
354             shares_in_chain = my_shares & matching_in_chain
355             stale_shares = my_shares - matching_in_chain
356             return len(shares_in_chain) + len(stale_shares), len(stale_shares)
357         
358         def compute(state, payout_script):
359             if payout_script is None:
360                 payout_script = my_script
361             if state['best_share_hash'] is None and args.net.PERSIST:
362                 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
363             if len(p2p_node.peers) == 0 and args.net.PERSIST:
364                 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
365             pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
366             pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
367             extra_txs = []
368             size = 0
369             for tx in pre_extra_txs:
370                 this_size = len(bitcoin.data.tx_type.pack(tx.tx))
371                 if size + this_size > 500000:
372                     break
373                 extra_txs.append(tx)
374                 size += this_size
375             # XXX check sigops!
376             # XXX assuming generate_tx is smallish here..
377             def get_stale_frac():
378                 shares, stale_shares = get_share_counts()
379                 if shares == 0:
380                     return ""
381                 frac = stale_shares/shares
382                 return 2*struct.pack('<H', int(65535*frac + .5))
383             generate_tx = p2pool.generate_transaction(
384                 tracker=tracker,
385                 previous_share_hash=state['best_share_hash'],
386                 new_script=payout_script,
387                 subsidy=args.net.BITCOIN_SUBSIDY_FUNC(state['height']) + sum(tx.value_in - tx.value_out for tx in extra_txs),
388                 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)) + get_stale_frac(),
389                 block_target=state['target'],
390                 net=args.net,
391             )
392             print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
393             #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
394             #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
395             transactions = [generate_tx] + [tx.tx for tx in extra_txs]
396             merkle_root = bitcoin.data.merkle_hash(transactions)
397             merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
398             
399             timestamp = int(time.time() - current_work2.value['clock_offset'])
400             if state['best_share_hash'] is not None:
401                 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
402                 if timestamp2 > timestamp:
403                     print 'Toff', timestamp2 - timestamp
404                     timestamp = timestamp2
405             target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
406             times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
407             #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
408             return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
409         
410         my_shares = set()
411         times = {}
412         
413         def got_response(data):
414             try:
415                 # match up with transactions
416                 header = bitcoin.getwork.decode_data(data)
417                 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
418                 if transactions is None:
419                     print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
420                     return False
421                 block = dict(header=header, txs=transactions)
422                 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
423                 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
424                     if factory.conn.value is not None:
425                         factory.conn.value.send_block(block=block)
426                     else:
427                         print 'No bitcoind connection! Erp!'
428                     if hash_ <= block['header']['target']:
429                         print
430                         print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
431                         print
432                 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
433                 if hash_ > target:
434                     print 'Received invalid share from worker - %x/%x' % (hash_, target)
435                     return False
436                 share = p2pool.Share.from_block(block)
437                 my_shares.add(share.hash)
438                 print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
439                 good = share.previous_hash == current_work.value['best_share_hash']
440                 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
441                 p2p_shares([share])
442                 # eg. good = share.hash == current_work.value['best_share_hash'] here
443                 return good
444             except:
445                 log.err(None, 'Error processing data received from worker:')
446                 return False
447         
448         web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
449         
450         def get_rate():
451             if current_work.value['best_share_hash'] is not None:
452                 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
453                 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
454                 return json.dumps(att_s)
455             return json.dumps(None)
456         
457         def get_users():
458             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
459             weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
460             res = {}
461             for script in sorted(weights, key=lambda s: weights[s]):
462                 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
463             return json.dumps(res)
464         
465         class WebInterface(resource.Resource):
466             def __init__(self, func, mime_type):
467                 self.func, self.mime_type = func, mime_type
468             
469             def render_GET(self, request):
470                 request.setHeader('Content-Type', self.mime_type)
471                 return self.func()
472         
473         web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
474         web_root.putChild('users', WebInterface(get_users, 'application/json'))
475         if args.charts:
476             web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
477         
478         reactor.listenTCP(args.worker_port, server.Site(web_root))
479         
480         print '    ...success!'
481         print
482         
483         # done!
484         
485         tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
486         get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
487         
488         class Tx(object):
489             def __init__(self, tx, seen_at_block):
490                 self.hash = bitcoin.data.tx_type.hash256(tx)
491                 self.tx = tx
492                 self.seen_at_block = seen_at_block
493                 self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
494                 #print
495                 #print '%x %r' % (seen_at_block, tx)
496                 #for mention in self.mentions:
497                 #    print '%x' % mention
498                 #print
499                 self.parents_all_in_blocks = False
500                 self.value_in = 0
501                 #print self.tx
502                 self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
503                 self._find_parents_in_blocks()
504             
505             @defer.inlineCallbacks
506             def _find_parents_in_blocks(self):
507                 for tx_in in self.tx['tx_ins']:
508                     try:
509                         raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
510                     except Exception:
511                         return
512                     self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
513                     #print raw_transaction
514                     if not raw_transaction['parent_blocks']:
515                         return
516                 self.parents_all_in_blocks = True
517             
518             def is_good(self):
519                 if not self.parents_all_in_blocks:
520                     return False
521                 x = self.is_good2()
522                 #print 'is_good:', x
523                 return x
524         
525         @defer.inlineCallbacks
526         def new_tx(tx_hash):
527             try:
528                 assert isinstance(tx_hash, (int, long))
529                 #print 'REQUESTING', tx_hash
530                 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
531                 #print 'GOT', tx
532                 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
533             except:
534                 log.err(None, 'Error handling tx:')
535         # disable for now, for testing impact on stales
536         #factory.new_tx.watch(new_tx)
537         
538         def new_block(block_hash):
539             work_updated.happened()
540         factory.new_block.watch(new_block)
541         
542         print 'Started successfully!'
543         print
544         
545         ht.updated.watch(set_real_work2)
546         
547         @defer.inlineCallbacks
548         def work1_thread():
549             while True:
550                 flag = work_updated.get_deferred()
551                 try:
552                     yield set_real_work1()
553                 except:
554                     log.err()
555                 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
556         
557         @defer.inlineCallbacks
558         def work2_thread():
559             while True:
560                 try:
561                     set_real_work2()
562                 except:
563                     log.err()
564                 yield deferral.sleep(random.expovariate(1/20))
565         
566         work1_thread()
567         work2_thread()
568         
569         
570         def watchdog_handler(signum, frame):
571             print "Watchdog timer went off at:"
572             traceback.print_exc()
573         
574         signal.signal(signal.SIGALRM, watchdog_handler)
575         task.LoopingCall(signal.alarm, 30).start(1)
576         
577         
578         def read_stale_frac(share):
579             if len(share.nonce) != 20:
580                 return None
581             a, b = struct.unpack("<HH", share.nonce[-4:])
582             if a != b:
583                 return None
584             return a/65535
585         
586         while True:
587             yield deferral.sleep(3)
588             try:
589                 if current_work.value['best_share_hash'] is not None:
590                     height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
591                     if height > 2:
592                         att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
593                         weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
594                         shares, stale_shares = get_share_counts()
595                         print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
596                             math.format(att_s),
597                             height,
598                             len(tracker.verified.shares),
599                             len(tracker.shares),
600                             weights.get(my_script, 0)/total_weight*100,
601                             math.format(weights.get(my_script, 0)/total_weight*att_s),
602                             shares,
603                             stale_shares,
604                             len(p2p_node.peers),
605                         ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
606                         fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
607                         if fracs:
608                             med = math.median(fracs)
609                             print 'Median stale proportion:', med
610                             if shares:
611                                 print '    Own:', stale_shares/shares
612                                 if med < .99:
613                                     print '    Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
614                             
615                             
616             except:
617                 log.err()
618     except:
619         log.err(None, 'Fatal error:')
620     finally:
621         reactor.stop()
622
623 def run():
624     parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
625     parser.convert_arg_line_to_args = lambda arg_line: (arg for arg in arg_line.split() if arg.strip())
626     parser.add_argument('--version', action='version', version=p2pool_init.__version__)
627     parser.add_argument('--net',
628         help='use specified network (choices: bitcoin (default), namecoin, ixcoin)',
629         action='store', choices=set(['bitcoin', 'namecoin', 'ixcoin']), default='bitcoin', dest='net_name')
630     parser.add_argument('--testnet',
631         help='use the testnet',
632         action='store_const', const=True, default=False, dest='testnet')
633     parser.add_argument('--debug',
634         help='debugging mode',
635         action='store_const', const=True, default=False, dest='debug')
636     parser.add_argument('-a', '--address',
637         help='generate to this address (defaults to requesting one from bitcoind)',
638         type=str, action='store', default=None, dest='address')
639     parser.add_argument('--charts',
640         help='generate charts on the web interface (requires PIL and pygame)',
641         action='store_const', const=True, default=False, dest='charts')
642     
643     p2pool_group = parser.add_argument_group('p2pool interface')
644     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
645         help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
646         type=int, action='store', default=None, dest='p2pool_port')
647     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
648         help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
649         type=str, action='append', default=[], dest='p2pool_nodes')
650     parser.add_argument('-l', '--low-bandwidth',
651         help='trade lower bandwidth usage for higher latency (reduced efficiency)',
652         action='store_true', default=False, dest='low_bandwidth')
653     parser.add_argument('--disable-upnp',
654         help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
655         action='store_false', default=True, dest='upnp')
656     
657     worker_group = parser.add_argument_group('worker interface')
658     worker_group.add_argument('-w', '--worker-port', metavar='PORT',
659         help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
660         type=int, action='store', default=9332, dest='worker_port')
661     
662     bitcoind_group = parser.add_argument_group('bitcoind interface')
663     bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
664         help='connect to a bitcoind at this address (default: 127.0.0.1)',
665         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
666     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
667         help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
668         type=int, action='store', default=8332, dest='bitcoind_rpc_port')
669     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
670         help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
671         type=int, action='store', default=None, dest='bitcoind_p2p_port')
672     
673     bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
674         help='bitcoind RPC interface username',
675         type=str, action='store', dest='bitcoind_rpc_username')
676     bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
677         help='bitcoind RPC interface password',
678         type=str, action='store', dest='bitcoind_rpc_password')
679     
680     args = parser.parse_args()
681     
682     if args.debug:
683         p2pool_init.DEBUG = True
684         class ReopeningFile(object):
685             def __init__(self, *open_args, **open_kwargs):
686                 self.open_args, self.open_kwargs = open_args, open_kwargs
687                 self.inner_file = open(*self.open_args, **self.open_kwargs)
688             def reopen(self):
689                 self.inner_file.close()
690                 self.inner_file = open(*self.open_args, **self.open_kwargs)
691             def write(self, data):
692                 self.inner_file.write(data)
693             def flush(self):
694                 self.inner_file.flush()
695         class TeePipe(object):
696             def __init__(self, outputs):
697                 self.outputs = outputs
698             def write(self, data):
699                 for output in self.outputs:
700                     output.write(data)
701             def flush(self):
702                 for output in self.outputs:
703                     output.flush()
704         class TimestampingPipe(object):
705             def __init__(self, inner_file):
706                 self.inner_file = inner_file
707                 self.buf = ''
708                 self.softspace = 0
709             def write(self, data):
710                 buf = self.buf + data
711                 lines = buf.split('\n')
712                 for line in lines[:-1]:
713                     self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
714                     self.inner_file.flush()
715                 self.buf = lines[-1]
716             def flush(self):
717                 pass
718         logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
719         sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
720         if hasattr(signal, "SIGUSR1"):
721             def sigusr1(signum, frame):
722                 print '''Caught SIGUSR1, closing 'debug.log'...'''
723                 logfile.reopen()
724                 print '''...and reopened 'debug.log' after catching SIGUSR1.'''
725             signal.signal(signal.SIGUSR1, sigusr1)
726     
727     args.net = {
728         ('bitcoin', False): p2pool.Mainnet,
729         ('bitcoin', True): p2pool.Testnet,
730         ('namecoin', False): p2pool.NamecoinMainnet,
731         ('namecoin', True): p2pool.NamecoinTestnet,
732         ('ixcoin', False): p2pool.IxcoinMainnet,
733         ('ixcoin', True): p2pool.IxcoinTestnet,
734     }[args.net_name, args.testnet]
735     
736     if args.bitcoind_p2p_port is None:
737         args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
738     
739     if args.p2pool_port is None:
740         args.p2pool_port = args.net.P2P_PORT
741     
742     if args.address is not None:
743         try:
744             args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
745         except Exception, e:
746             raise ValueError('error parsing address: ' + repr(e))
747     else:
748         args.pubkey_hash = None
749     
750     reactor.callWhenRunning(main, args)
751     reactor.run()