group stales according to reason for being stale
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, task, threads
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
28
29 @deferral.retry('Error getting work from bitcoind:', 3)
30 @defer.inlineCallbacks
31 def getwork(bitcoind, ht, net):
32     try:
33         work = yield bitcoind.rpc_getmemorypool()
34         defer.returnValue(dict(
35             version=work['version'],
36             previous_block_hash=int(work['previousblockhash'], 16),
37             transactions=[bitcoin.data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
38             subsidy=work['coinbasevalue'],
39             time=work['time'],
40             target=bitcoin.data.FloatingInteger(work['bits']),
41         ))
42     except jsonrpc.Error, e:
43         if e.code != -32601:
44             raise
45         
46         print "---> Update your bitcoind to support the 'getmemorypool' RPC call. Not including transactions in generated blocks! <---"
47         work = bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork()))
48         try:
49             subsidy = net.BITCOIN_SUBSIDY_FUNC(ht.getHeight(work.previous_block))
50         except ValueError:
51             subsidy = net.BITCOIN_SUBSIDY_FUNC(1000)
52         
53         defer.returnValue(dict(
54             version=work.version,
55             previous_block_hash=work.previous_block,
56             transactions=[],
57             subsidy=subsidy,
58             time=work.timestamp,
59             target=work.block_target,
60         ))
61
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    # Ask the connected bitcoind, over its P2P interface, for a payout script
    # using the legacy IP-transaction 'checkorder' exchange.
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        # Caller treats None as "fall back to paying an address instead".
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
72
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    # Fallback payout target: the address of bitcoind's 'p2pool' account,
    # converted to a standard pay-to-pubkey-hash output script.
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
77
@defer.inlineCallbacks
def main(args):
    """Node entry point: connect to bitcoind (RPC + P2P), load cached shares,
    join the p2pool network, serve work to miners over JSON-RPC, and loop
    printing pool status until the reactor stops.
    """
    try:
        if args.charts:
            from . import draw
        
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        # BITCOIN_RPC_CHECK verifies we're talking to the right chain's daemon.
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(args.net.BITCOIN_RPC_CHECK)(bitcoind)
        if not good:
            print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield deferral.retry('Error while testing getwork:', 1)(defer.inlineCallbacks(lambda: defer.returnValue(bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork())))))()
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work.previous_block,)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        if args.pubkey_hash is None:
            if my_script is None:
                # checkorder was denied; pay to an address from bitcoind instead.
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            # An explicit --address overrides whatever bitcoind offered.
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', bitcoin.data.script2_to_human(my_script, args.net)
        print
        
        print 'Loading cached block headers...'
        ht = bitcoin.p2p.HeightTracker(factory, args.net.NAME + '_headers.dat')
        print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
        print
        
        # tracker holds the share chain; ss persists shares/verified hashes to
        # disk files next to the executable.
        tracker = p2pool.OkayTracker(args.net)
        ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.NAME + '_shares.'), args.net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                # Loaded shares are marked as already shared/stored so they
                # aren't rebroadcast or rewritten.
                contents.shared = True
                contents.stored = True
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                # Verified-hash record with no matching share: drop it.
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        # Keep the on-disk store in sync with the in-memory tracker.
        tracker.added.watch(lambda share: ss.add_share(share))
        tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        # share_hash -> (last_request_time, request_count), for backoff below
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            # Poll bitcoind for fresh work and update both work variables;
            # re-run the share-chain logic only if the previous block changed.
            work = yield getwork(bitcoind, ht, args.net)
            changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                target=work['target'],
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                transactions=work['transactions'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            ))
            if changed:
                set_real_work2()
        
        def set_real_work2():
            # Pick the best share to build on, then request any desired
            # (missing-parent) shares from peers likely to have them.
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                # Exponential backoff: skip if we asked for this hash recently.
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    # Mostly prefer a random peer that advertised this chain,
                    # occasionally fall back to the originating peer.
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        print
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            # Broadcast one share to every connected peer (except its source).
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            # Handler for shares received from the p2pool network (and, below,
            # for our own locally-found shares).
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            new_count = 0
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                
                new_count += 1
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
            
            if shares and peer is not None:
                # Remember that this peer knows about this chain head.
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if new_count:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        @tracker.verified.added.watch
        def _(share):
            # A verified share that also meets the bitcoin target is a full
            # block: submit it to bitcoind immediately.
            if share.bitcoin_hash <= share.header['target']:
                print
                print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                print
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                else:
                    print 'No bitcoind connection! Erp!'
        
        def p2p_share_hashes(share_hashes, peer):
            # Handler for share-hash announcements: request any we don't have,
            # with the same per-hash backoff used in set_real_work2.
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            # Handler for a peer requesting shares: walk each requested chain,
            # capping total work at ~1000 parents split across the hashes.
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            # Parse a "host[:port]" string into an (ip, port) tuple.
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # Hard-coded bootstrap nodes, plus DNS-resolved ones below.
        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.NAME),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            # Broadcast our not-yet-shared shares at the tip of the new chain.
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            # Periodically (re-)establish a UPnP port mapping for our p2p port.
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool_init.DEBUG:
                        log.err(None, "UPnP error:")
                # Average ~120 s between attempts.
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        # merkle_root -> transactions of the work we handed out, so returned
        # getwork results can be matched back to their block template.
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # Random per-run tag embedded in coinbase nonces to recognize our own shares.
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        share_counter = skiplists.CountsSkipList(tracker, run_identifier)
        removed_unstales = set()
        def get_share_counts(doa=False):
            # Count our shares in/off the current best chain. Returns
            # (total, stale) or, with doa=True, (total, stale_doa, stale_not_doa).
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            matching_in_chain = share_counter(current_work.value['best_share_hash'], max(0, height - 1)) | removed_unstales
            shares_in_chain = my_shares & matching_in_chain
            stale_shares = my_shares - matching_in_chain
            if doa:
                stale_doa_shares = stale_shares & doa_shares
                stale_not_doa_shares = stale_shares - stale_doa_shares
                return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
            return len(shares_in_chain) + len(stale_shares), len(stale_shares)
        @tracker.verified.removed.watch
        def _(share):
            # Our shares that expire off the chain still count as unstale.
            if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_unstales.add(share.hash)
        
        def compute(state, payout_script):
            # Build a getwork BlockAttempt for a miner from the current state.
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            if len(p2p_node.peers) == 0 and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
            if time.time() > current_work2.value['last_update'] + 60:
                raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
            
            # XXX assuming generate_tx is smallish here..
            def get_stale_frac():
                # Encode our stale fraction as a doubled 2-byte value (the
                # duplication lets read_stale_frac validate it later).
                shares, stale_shares = get_share_counts()
                if shares == 0:
                    return ""
                frac = stale_shares/shares
                return 2*struct.pack('<H', int(65535*frac + .5))
            subsidy = current_work2.value['subsidy']
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                subsidy=subsidy,
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)) + get_stale_frac(),
                block_target=state['target'],
                net=args.net,
            )
            print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], (generate_tx['tx_outs'][-1]['value']-subsidy//200)*1e-8, args.net.BITCOIN_SYMBOL, subsidy*1e-8, args.net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + list(current_work2.value['transactions'])
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            # Never let the timestamp go below the median of the last 11 shares.
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        # Hashes of shares we found ourselves, and those dead-on-arrival.
        my_shares = set()
        doa_shares = set()
        # nonce -> time work was handed out, for share-age reporting.
        times = {}
        
        def got_response(data, user):
            # Handle a getwork solution submitted by a miner; returns whether
            # the submitted share was accepted as good.
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                    if hash_ <= block['header']['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                if share.previous_hash != current_work.value['best_share_hash']:
                    # Built on a stale chain tip: dead on arrival.
                    doa_shares.add(share.hash)
                print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        def get_rate():
            # JSON endpoint: estimated pool hash rate, corrected for the
            # recent median stale fraction.
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
                fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
                return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
            return json.dumps(None)
        
        def get_users():
            # JSON endpoint: payout weight fraction per payout script.
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            # Minimal twisted.web resource serving func() with a fixed MIME type.
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        if args.charts:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        # do new getwork when a block is heard on the p2p interface
        
        def new_block(block_hash):
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def work1_thread():
            # Poll bitcoind for work every 1-10 s, or immediately when
            # work_updated fires (e.g. a new block was announced).
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            # Periodically re-evaluate the best share chain (avg every ~20 s).
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
        
        # Watchdog: dump a stack trace if the reactor stalls for 30 s
        # (the alarm is re-armed every second while the loop is healthy).
        if hasattr(signal, 'SIGALRM'):
            def watchdog_handler(signum, frame):
                print 'Watchdog timer went off at:'
                traceback.print_stack()
            
            signal.signal(signal.SIGALRM, watchdog_handler)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        
        def read_stale_frac(share):
            # Inverse of get_stale_frac in compute(): a 20-byte nonce is
            # 8 bytes run id + 8 bytes random + the stale fraction packed
            # twice as '<H'; the duplication serves as a validity check.
            if len(share.nonce) != 20:
                return None
            a, b = struct.unpack("<HH", share.nonce[-4:])
            if a != b:
                return None
            return a/65535
        
        # Main status loop: print pool/own statistics every ~3 seconds.
        while True:
            yield deferral.sleep(3)
            try:
                if time.time() > current_work2.value['last_update'] + 60:
                    print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**100)
                        shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
                        stale_shares = stale_doa_shares + stale_not_doa_shares
                        fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
                        print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
                            math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
                            height,
                            len(tracker.verified.shares),
                            len(tracker.shares),
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
                            shares,
                            stale_not_doa_shares,
                            stale_doa_shares,
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                        if fracs:
                            med = math.median(fracs)
                            print 'Median stale proportion:', med
                            if shares:
                                print '    Own:', stale_shares/shares
                                if med < .99:
                                    print '    Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
                            
                            
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
    finally:
        reactor.stop()
610
611 def run():
    class FixedArgumentParser(argparse.ArgumentParser):
        """ArgumentParser that expands '@file' arguments recursively.
        
        NOTE(review): this overrides the private argparse method
        _read_args_from_files; it appears to differ from the stdlib version by
        recursing into arguments read from the file (so an @file can itself
        reference further @files) - confirm against the bundled argparse.
        """
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                # convert_arg_line_to_args is overridden below to
                                # split each line on whitespace.
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            # Recurse so nested @file references are expanded too.
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
641     
    # Build the command-line interface. convert_arg_line_to_args is overridden
    # so each line of an '@file' may hold several whitespace-separated args.
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
    parser.convert_arg_line_to_args = lambda arg_line: (arg for arg in arg_line.split() if arg.strip())
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(x for x in p2pool.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')
    parser.add_argument('--logfile',
        help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
        type=str, action='store', default=None, dest='logfile')
    
    # p2pool peer-to-peer interface options.
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    # Miner-facing RPC interface options.
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: bitcoin: 9332 namecoin: 9331 ixcoin: 9330 i0coin: 9329, +10000 for testnets)',
        type=int, action='store', default=None, dest='worker_port')
    
    # Upstream bitcoind connection options.
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332, 8338 for ixcoin)',
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # Positional credentials: username is optional (nargs='?'), password required.
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
        help='bitcoind RPC interface username (default: empty)',
        type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
    
    # Default logfile: <net>[_testnet].log next to the launch script.
    if args.logfile is None:
       args.logfile = os.path.join(os.path.dirname(sys.argv[0]), args.net_name + ('_testnet' if args.testnet else '') + '.log')
    
709     class LogFile(object):
710         def __init__(self, filename):
711             self.filename = filename
712             self.inner_file = None
713             self.reopen()
714         def reopen(self):
715             if self.inner_file is not None:
716                 self.inner_file.close()
717             open(self.filename, 'a').close()
718             f = open(self.filename, 'rb')
719             f.seek(0, os.SEEK_END)
720             length = f.tell()
721             if length > 100*1000*1000:
722                 f.seek(-1000*1000, os.SEEK_END)
723                 while True:
724                     if f.read(1) in ('', '\n'):
725                         break
726                 data = f.read()
727                 f.close()
728                 f = open(self.filename, 'wb')
729                 f.write(data)
730             f.close()
731             self.inner_file = open(self.filename, 'a')
732         def write(self, data):
733             self.inner_file.write(data)
734         def flush(self):
735             self.inner_file.flush()
736     class TeePipe(object):
737         def __init__(self, outputs):
738             self.outputs = outputs
739         def write(self, data):
740             for output in self.outputs:
741                 output.write(data)
742         def flush(self):
743             for output in self.outputs:
744                 output.flush()
745     class TimestampingPipe(object):
746         def __init__(self, inner_file):
747             self.inner_file = inner_file
748             self.buf = ''
749             self.softspace = 0
750         def write(self, data):
751             buf = self.buf + data
752             lines = buf.split('\n')
753             for line in lines[:-1]:
754                 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
755                 self.inner_file.flush()
756             self.buf = lines[-1]
757         def flush(self):
758             pass
    logfile = LogFile(args.logfile)
    # Mirror everything written to stdout/stderr (and twisted's log observer)
    # to both the real stderr and the logfile, timestamping each line.
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    # SIGUSR1 re-opens the logfile so external log rotation can be used.
    # Guarded with hasattr because the signal doesn't exist on all platforms
    # (e.g. Windows).
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    # Also reopen every 5 seconds, which additionally applies the size cap
    # enforced in reopen().
    task.LoopingCall(logfile.reopen).start(5)
    
    # Select the concrete network definition, e.g. 'bitcoin' or 'bitcoin_testnet'.
    args.net = p2pool.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    # Fill in any ports not given explicitly from the selected network's defaults.
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = args.net.BITCOIN_RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = args.net.WORKER_PORT
    
    # Resolve the payout address to a pubkey hash up front so a bad address
    # fails fast; None means an address will be requested from bitcoind later
    # (per the --address help text).
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    # Start the node once the reactor is running; reactor.run() blocks here
    # until shutdown.
    reactor.callWhenRunning(main, args)
    reactor.run()