remove RPC calls to bitcoind's getblocknumber - should make p2pool look just like...
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
28
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind, ht):
    # Fetch the current block template over RPC, then resolve the height of
    # its parent block from the local header tracker. NOTE: a new block can
    # arrive between the two lookups, so the pair may briefly disagree.
    block_attempt = bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork()))
    defer.returnValue((block_attempt, ht.getHeight(block_attempt.previous_block)))
36
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind, over its P2P connection, for a script to send payouts to.

    Sends a null checkorder request; returns the script on 'success',
    None on 'denied', and raises ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    response = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = response['reply']
    if reply == 'denied':
        defer.returnValue(None)
    elif reply == 'success':
        defer.returnValue(response['script'])
    raise ValueError('Unexpected reply: %r' % (response,))
47
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    # Fallback path: pay to the address of bitcoind's 'p2pool' account.
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
52
@defer.inlineCallbacks
def main(args):
    """Run the p2pool node.

    Connects to bitcoind (RPC and P2P), loads the cached share chain, joins
    the p2pool P2P network, serves getwork to miners over JSON-RPC, and then
    loops forever printing pool status. Always stops the reactor on exit.
    """
    try:
        if args.charts:
            from . import draw
        
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield args.net.BITCOIN_RPC_CHECK(bitcoind)
        if not good:
            print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork()))
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work.previous_block,)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        if args.pubkey_hash is None:
            if my_script is None:
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            # an explicit payout address was given on the command line
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        print 'Loading cached block headers...'
        ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
        print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
        print
        
        # load the persisted share chain and the set of already-verified hashes
        tracker = p2pool.OkayTracker(args.net)
        ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                contents.shared = True
                contents.stored = True
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    # progress indicator every 1000 shares
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        # persist new shares/verifications as they happen
        tracker.added.watch(ss.add_share)
        tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        # share_hash -> (last_request_time, request_count); entries expire after 300s
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            """Poll bitcoind for new work and publish it into current_work;
            kicks set_real_work2 when the previous block hash changed."""
            work, height = yield getwork(bitcoind, ht)
            changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                clock_offset=time.time() - work.timestamp,
            ))
            if changed:
                set_real_work2()
        
        def set_real_work2():
            """Re-evaluate the best share chain and request any missing
            ancestor shares from peers believed to have them."""
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                # exponential backoff: wait 10 * 1.5**count seconds between retries
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    # mostly ask a random knowledgeable peer, occasionally the original
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        print
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            """Broadcast a share to every connected peer except ignore_peer."""
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            """Handle shares received from the p2pool network (or our own
            workers): add them to the tracker, submit any that are full
            bitcoin blocks, and re-think the best chain if anything was new."""
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                some_new = True
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                # a share that also meets the bitcoin target is a full block
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        def p2p_share_hashes(share_hashes, peer):
            """Handle advertised share hashes: request the ones we don't have,
            with the same backoff scheme as set_real_work2."""
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            """Serve a peer's getshares request, capping the total response
            at roughly 1000 shares split across the requested hashes."""
            # NOTE(review): raises ZeroDivisionError if share_hashes is empty —
            # presumably the protocol layer never sends an empty list; verify.
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            """Parse 'host[:port]' into an (ip, port) tuple, defaulting the port."""
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # hard-coded bootstrap nodes, plus DNS-resolved ones below
        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            # broadcast our not-yet-shared shares at the head of the new best chain
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            """Periodically (re-)establish a UPnP port mapping for the p2pool port."""
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
                except:
                    if p2pool_init.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # random per-run tag embedded in coinbase nonces so we can recognize our own shares
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        share_counter = skiplists.CountsSkipList(tracker, run_identifier)
        def get_share_counts():
            """Return (total_shares, stale_shares) for shares produced this run."""
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            matching_in_chain = share_counter(current_work.value['best_share_hash'], height)
            shares_in_chain = my_shares & matching_in_chain
            stale_shares = my_shares - matching_in_chain
            return len(shares_in_chain) + len(stale_shares), len(stale_shares)
        
        def compute(state, payout_script):
            """Build a getwork BlockAttempt for a miner from the current state.

            Assembles a generate transaction (plus mempool transactions from
            tx_pool), records the merkle root -> transactions mapping so the
            solved work can be matched up later, and raises jsonrpc.Error while
            the node is still syncing or unconnected (on PERSIST networks).
            """
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            if len(p2p_node.peers) == 0 and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            extra_txs = []
            size = 0
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            def get_stale_frac():
                # encode the stale fraction as a doubled little-endian uint16
                # (read back by read_stale_frac, which checks the two copies match)
                shares, stale_shares = get_share_counts()
                if shares == 0:
                    return ""
                frac = stale_shares/shares
                return 2*struct.pack('<H', int(65535*frac + .5))
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                subsidy=args.net.BITCOIN_SUBSIDY_FUNC(state['height']) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)) + get_stale_frac(),
                block_target=state['target'],
                net=args.net,
            )
            print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                # never go below the median-time-past of the last 11 shares, plus one
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            # NOTE(review): 'times' grows without bound over the process lifetime —
            # entries are never pruned; consider an ExpiringDict
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        my_shares = set()
        times = {}
        
        def got_response(data):
            """Handle solved work returned by a miner; returns True iff the
            share was accepted and built on the current best share."""
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                    if hash_ <= block['header']['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Received invalid share from worker - %x/%x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        def get_rate():
            """JSON endpoint: pool attempts/second over the last <=720 shares."""
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
                return json.dumps(att_s)
            return json.dumps(None)
        
        def get_users():
            """JSON endpoint: payout-script -> weight fraction over recent shares."""
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            # minimal twisted resource wrapping a zero-argument callable
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        if args.charts:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        class Tx(object):
            """A mempool transaction plus bookkeeping for fee calculation and
            confirmation of its inputs."""
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                # asynchronously resolve each input; bail out on any failure or
                # any unconfirmed parent, leaving parents_all_in_blocks False
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined anywhere on Tx — this
                # raises AttributeError if reached; harmless only while the
                # new_tx watcher below stays disabled. Verify before re-enabling.
                x = self.is_good2()
                #print 'is_good:', x
                return x
        
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            """Fetch an advertised transaction from bitcoind and pool it."""
            try:
                assert isinstance(tx_hash, (int, long))
                #print 'REQUESTING', tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print 'GOT', tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                log.err(None, 'Error handling tx:')
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            # a new bitcoin block invalidates current work; wake work1_thread
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def work1_thread():
            """Poll bitcoind for work every ~20s, or immediately on new-block events."""
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            """Re-think the best share chain every ~20s as a safety net."""
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
        
        def watchdog_handler(signum, frame):
            # fires if the reactor stalls for 30s (alarm isn't re-armed)
            print "Watchdog timer went off at:"
            # NOTE(review): print_exc() prints the *current exception*, which is
            # usually absent inside a signal handler — traceback.print_stack(frame)
            # was likely intended here; confirm before changing.
            traceback.print_exc()
        
        signal.signal(signal.SIGALRM, watchdog_handler)
        task.LoopingCall(signal.alarm, 30).start(1)
        
        
        def read_stale_frac(share):
            """Decode the stale fraction that get_stale_frac embedded in a
            share's nonce; returns None if absent or corrupted."""
            if len(share.nonce) != 20:
                return None
            a, b = struct.unpack("<HH", share.nonce[-4:])
            if a != b:
                return None
            return a/65535
        
        # main status loop: print pool statistics every 3 seconds
        while True:
            yield deferral.sleep(3)
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        shares, stale_shares = get_share_counts()
                        print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                            math.format(att_s),
                            height,
                            len(tracker.verified.shares),
                            len(tracker.shares),
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            shares,
                            stale_shares,
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                        fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
                        if fracs:
                            med = math.median(fracs)
                            print 'Median stale proportion:', med
                            if shares:
                                print '    Own:', stale_shares/shares
                                if med < .99:
                                    print '    Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
                            
                            
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
    finally:
        reactor.stop()
617
def run():
    """Entry point: parse command-line arguments and start the p2pool node.

    Builds the argument parser (network selection, p2pool/worker/bitcoind
    options), optionally redirects stdout/stderr to a timestamped tee of
    the console and a reopenable 'debug.log' when --debug is given, fills
    in per-network defaults for ports, validates the payout address, and
    finally hands control to the Twisted reactor with main() scheduled.
    """
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
    # fromfile_prefix_chars='@' lets users keep arguments in a file; this
    # override splits each line of such a file into multiple
    # whitespace-separated arguments (argparse's default treats an entire
    # line as a single argument).
    parser.convert_arg_line_to_args = lambda arg_line: (arg for arg in arg_line.split() if arg.strip())
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--net',
        help='use specified network (choices: bitcoin (default), namecoin, ixcoin)',
        action='store', choices=set(['bitcoin', 'namecoin', 'ixcoin']), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # These two are positional (no option string), so they are required and
    # their relative order on the command line is fixed by the order of the
    # add_argument calls below: username first, then password.
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
        class ReopeningFile(object):
            """Write-only file wrapper whose underlying file can be closed
            and reopened in place (used below so a SIGUSR1 handler can make
            the process release and recreate 'debug.log')."""
            def __init__(self, *open_args, **open_kwargs):
                # Remember the open() arguments so reopen() can repeat them.
                self.open_args, self.open_kwargs = open_args, open_kwargs
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def reopen(self):
                self.inner_file.close()
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def write(self, data):
                self.inner_file.write(data)
            def flush(self):
                self.inner_file.flush()
        class TeePipe(object):
            """Write-only file-like object that duplicates every write and
            flush to each of several underlying outputs."""
            def __init__(self, outputs):
                self.outputs = outputs
            def write(self, data):
                for output in self.outputs:
                    output.write(data)
            def flush(self):
                for output in self.outputs:
                    output.flush()
        class TimestampingPipe(object):
            """Write-only file-like object that prefixes every complete line
            written through it with a HH:MM:SS.microseconds timestamp.
            Partial lines are buffered until their terminating newline
            arrives in a later write()."""
            def __init__(self, inner_file):
                self.inner_file = inner_file
                # Holds the trailing partial (newline-less) line between write() calls.
                self.buf = ''
                # The Python 2 'print' statement reads/sets .softspace on file objects.
                self.softspace = 0
            def write(self, data):
                buf = self.buf + data
                lines = buf.split('\n')
                # Everything before the last '\n' is a complete line; stamp and flush it.
                for line in lines[:-1]:
                    self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                    self.inner_file.flush()
                self.buf = lines[-1]
            def flush(self):
                # No-op: each complete line is already flushed in write().
                pass
        # Tee all output (including Twisted's log) to both the real stderr and
        # a 'debug.log' next to the script, timestamping every line.
        logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
        sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
        if hasattr(signal, "SIGUSR1"):
            # Reopen the log file on SIGUSR1 (e.g. after an external tool has
            # moved debug.log aside). Guarded because SIGUSR1 does not exist
            # on all platforms.
            def sigusr1(signum, frame):
                print '''Caught SIGUSR1, closing 'debug.log'...'''
                logfile.reopen()
                print '''...and reopened 'debug.log' after catching SIGUSR1.'''
            signal.signal(signal.SIGUSR1, sigusr1)
    
    # Map the (coin name, testnet?) pair to its network-parameters class.
    args.net = {
        ('bitcoin', False): p2pool.Mainnet,
        ('bitcoin', True): p2pool.Testnet,
        ('namecoin', False): p2pool.NamecoinMainnet,
        ('namecoin', True): p2pool.NamecoinTestnet,
        ('ixcoin', False): p2pool.IxcoinMainnet,
        ('ixcoin', True): p2pool.IxcoinTestnet,
    }[args.net_name, args.testnet]
    
    # Fill in per-network defaults for any ports not given on the command line.
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.address is not None:
        try:
            # Validate the payout address now and convert it to a pubkey hash
            # for the selected network.
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        # No address given; per the -a help text, main() will request one
        # from bitcoind instead.
        args.pubkey_hash = None
    
    # Schedule main() to run once the reactor starts, then enter the event loop.
    reactor.callWhenRunning(main, args)
    reactor.run()