get testnet working again
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, task, threads
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
28
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind, ht, net):
    """Fetch a block template from bitcoind over JSON-RPC.
    
    Prefers 'getmemorypool' (which includes the transactions to mine);
    if bitcoind reports that method as unknown (JSON-RPC error -32601),
    falls back to plain 'getwork' with no transactions, estimating the
    subsidy from the previous block's height via ht (a HeightTracker).
    
    Fires (via defer.returnValue) a dict with keys: version,
    previous_block_hash (int), transactions, subsidy, time, target.
    """
    try:
        work = yield bitcoind.rpc_getmemorypool()
        defer.returnValue(dict(
            version=work['version'],
            previous_block_hash=int(work['previousblockhash'], 16),
            transactions=[bitcoin.data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
            subsidy=work['coinbasevalue'],
            time=work['time'],
            target=bitcoin.data.FloatingInteger(work['bits']),
        ))
    except jsonrpc.Error, e:
        # -32601 = method not found; anything else is a real failure.
        if e.code != -32601:
            raise
        
        print "---> Update your bitcoind to support the 'getmemorypool' RPC call. Not including transactions in generated blocks! <---"
        work = bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork()))
        try:
            subsidy = net.BITCOIN_SUBSIDY_FUNC(ht.getHeight(work.previous_block))
        except ValueError:
            # height unknown to the tracker; 1000 is an arbitrary early
            # height whose subsidy equals the pre-first-halving value
            subsidy = net.BITCOIN_SUBSIDY_FUNC(1000)
        
        defer.returnValue(dict(
            version=work.version,
            previous_block_hash=work.previous_block,
            transactions=[],
            subsidy=subsidy,
            time=work.timestamp,
            target=work.block_target,
        ))
61
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind (over the bitcoin P2P checkorder message) for a script
    to send payouts to. Fires the script on success, None if the request
    was denied, and raises ValueError on any other reply."""
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'denied':
        defer.returnValue(None)
    if reply != 'success':
        raise ValueError('Unexpected reply: %r' % (res,))
    defer.returnValue(res['script'])
72
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: ask bitcoind for an address in its 'p2pool'
    account and build the matching pay-to-pubkey-hash script."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
77
78 @defer.inlineCallbacks
79 def main(args):
80     try:
81         if args.charts:
82             from . import draw
83         
84         print 'p2pool (version %s)' % (p2pool_init.__version__,)
85         print
86         
87         # connect to bitcoind over JSON-RPC and do initial getwork
88         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
89         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
90         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
91         good = yield deferral.retry('Error while checking bitcoind identity:', 1)(args.net.BITCOIN_RPC_CHECK)(bitcoind)
92         if not good:
93             print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
94             return
95         temp_work = yield deferral.retry('Error while testing getwork:', 1)(defer.inlineCallbacks(lambda: defer.returnValue(bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork())))))()
96         print '    ...success!'
97         print '    Current block hash: %x' % (temp_work.previous_block,)
98         print
99         
100         # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
101         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
102         factory = bitcoin.p2p.ClientFactory(args.net)
103         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
104         my_script = yield get_payout_script(factory)
105         if args.pubkey_hash is None:
106             if my_script is None:
107                 print '    IP transaction denied ... falling back to sending to address.'
108                 my_script = yield get_payout_script2(bitcoind, args.net)
109         else:
110             my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
111         print '    ...success!'
112         print '    Payout script:', bitcoin.data.script2_to_human(my_script, args.net)
113         print
114         
115         print 'Loading cached block headers...'
116         ht = bitcoin.p2p.HeightTracker(factory, args.net.NAME + '_headers.dat')
117         print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
118         print
119         
120         tracker = p2pool.OkayTracker(args.net)
121         ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.NAME + '_shares.'), args.net)
122         known_verified = set()
123         print "Loading shares..."
124         for i, (mode, contents) in enumerate(ss.get_shares()):
125             if mode == 'share':
126                 if contents.hash in tracker.shares:
127                     continue
128                 contents.shared = True
129                 contents.stored = True
130                 contents.time_seen = 0
131                 tracker.add(contents)
132                 if len(tracker.shares) % 1000 == 0 and tracker.shares:
133                     print "    %i" % (len(tracker.shares),)
134             elif mode == 'verified_hash':
135                 known_verified.add(contents)
136             else:
137                 raise AssertionError()
138         print "    ...inserting %i verified shares..." % (len(known_verified),)
139         for h in known_verified:
140             if h not in tracker.shares:
141                 ss.forget_verified_share(h)
142                 continue
143             tracker.verified.add(tracker.shares[h])
144         print "    ...done loading %i shares!" % (len(tracker.shares),)
145         print
146         tracker.added.watch(lambda share: ss.add_share(share))
147         tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
148         tracker.removed.watch(lambda share: ss.forget_share(share.hash))
149         tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
150         
151         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
152         
153         # information affecting work that should trigger a long-polling update
154         current_work = variable.Variable(None)
155         # information affecting work that should not trigger a long-polling update
156         current_work2 = variable.Variable(None)
157         
158         work_updated = variable.Event()
159         
160         requested = expiring_dict.ExpiringDict(300)
161         
        @defer.inlineCallbacks
        def set_real_work1():
            # Poll bitcoind for fresh work and refresh both work variables.
            # current_work holds the long-poll-triggering fields; current_work2
            # holds fields that change often but shouldn't wake workers.
            work = yield getwork(bitcoind, ht, args.net)
            # A different previous block means the bitcoin chain advanced, so
            # the best share must be re-evaluated (set_real_work2 below).
            changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                target=work['target'],
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                transactions=work['transactions'],
                subsidy=work['subsidy'],
                # clock_offset lets later code convert local time to bitcoind's clock
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            ))
            if changed:
                set_real_work2()
180         
        def set_real_work2():
            # Re-evaluate the best share to build on, then request any shares
            # the tracker says it wants but doesn't have yet.
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                # Exponential backoff: skip if a request for this hash went out
                # recently (window widens 1.5x per attempt).
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                # Candidates: connected peers known to have a head descending
                # from this missing share.
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    # first attempt: ask the peer that advertised it
                    peer = peer2
                else:
                    # retries: usually a random knowledgeable peer, sometimes
                    # (20%) fall back to the original advertiser
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    # stop at (near) our known heads so the peer doesn't resend
                    # chain segments we already have
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
215         
216         print 'Initializing work...'
217         yield set_real_work1()
218         set_real_work2()
219         print '    ...success!'
220         print
221         
222         start_time = time.time() - current_work2.value['clock_offset']
223         
224         # setup p2p logic and join p2pool network
225         
226         def share_share(share, ignore_peer=None):
227             for peer in p2p_node.peers.itervalues():
228                 if peer is ignore_peer:
229                     continue
230                 #if p2pool_init.DEBUG:
231                 #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
232                 peer.send_shares([share])
233             share.flag_shared()
234         
        def p2p_shares(shares, peer=None):
            # Handle shares received from the p2pool network (or from our own
            # worker via got_response, with peer=None): insert new ones into
            # the tracker and re-think the best share if anything was new.
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            new_count = 0
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                
                new_count += 1
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
            
            # Remember that this peer knows about the chain containing the
            # first share, so set_real_work2 can direct requests at it.
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if new_count:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*args.net.CHAIN_LENGTH)
259         
        @tracker.verified.added.watch
        def _(share):
            # Whenever a verified share also satisfies the *bitcoin* block
            # target, it is a full block -- submit it to bitcoind immediately.
            if share.bitcoin_hash <= share.header['target']:
                print
                print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                print
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                else:
                    print 'No bitcoind connection! Erp!'
270         
        def p2p_share_hashes(share_hashes, peer):
            # Handle a peer advertising share hashes: request any we don't
            # already have, subject to the same exponential backoff used in
            # set_real_work2.
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            # Record this peer as a source for the advertised chain head.
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
288         
289         def p2p_get_shares(share_hashes, parents, stops, peer):
290             parents = min(parents, 1000//len(share_hashes))
291             stops = set(stops)
292             shares = []
293             for share_hash in share_hashes:
294                 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
295                     if share.hash in stops:
296                         break
297                     shares.append(share)
298             print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
299             peer.send_shares(shares, full=True)
300         
301         print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
302         
303         def parse(x):
304             if ':' in x:
305                 ip, port = x.split(':')
306                 return ip, int(port)
307             else:
308                 return x, args.net.P2P_PORT
309         
310         nodes = set([
311             ('72.14.191.28', args.net.P2P_PORT),
312             ('62.204.197.159', args.net.P2P_PORT),
313             ('142.58.248.28', args.net.P2P_PORT),
314             ('94.23.34.145', args.net.P2P_PORT),
315         ])
316         for host in [
317             'p2pool.forre.st',
318             'dabuttonfactory.com',
319         ]:
320             try:
321                 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
322             except:
323                 log.err(None, 'Error resolving bootstrap node IP:')
324         
325         p2p_node = p2p.Node(
326             current_work=current_work,
327             port=args.p2pool_port,
328             net=args.net,
329             addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.NAME),
330             mode=0 if args.low_bandwidth else 1,
331             preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
332         )
333         p2p_node.handle_shares = p2p_shares
334         p2p_node.handle_share_hashes = p2p_share_hashes
335         p2p_node.handle_get_shares = p2p_get_shares
336         
337         p2p_node.start()
338         
339         # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            # When the best share changes, forward every not-yet-shared share
            # at the tip of the new chain to our peers (stopping at the first
            # one already marked shared).
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
346         current_work.changed.watch(work_changed)
347         
348         print '    ...success!'
349         print
350         
        @defer.inlineCallbacks
        def upnp_thread():
            # Background loop: if we appear to be on a LAN, keep (re)adding a
            # UPnP port mapping for the p2pool port, retrying roughly every
            # two minutes. Errors are only logged in debug mode since UPnP is
            # best-effort.
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
                except:
                    if p2pool_init.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
363         
364         if args.upnp:
365             upnp_thread()
366         
367         # start listening for workers with a JSON-RPC server
368         
369         print 'Listening for workers on port %i...' % (args.worker_port,)
370         
371         # setup worker logic
372         
373         merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
374         run_identifier = struct.pack('<Q', random.randrange(2**64))
375         
376         share_counter = skiplists.CountsSkipList(tracker, run_identifier)
377         removed_unstales = set()
        def get_share_counts():
            # Returns (total_shares, stale_shares) for shares we mined this
            # run. A share of ours is stale if it isn't in the current best
            # chain; removed_unstales compensates for shares that fell off the
            # end of the chain but were once part of it.
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            matching_in_chain = share_counter(current_work.value['best_share_hash'], max(0, height - 1)) | removed_unstales
            shares_in_chain = my_shares & matching_in_chain
            stale_shares = my_shares - matching_in_chain
            return len(shares_in_chain) + len(stale_shares), len(stale_shares)
        @tracker.verified.removed.watch
        def _(share):
            # When one of our own in-chain shares is pruned from the tracker,
            # remember it so get_share_counts keeps counting it as non-stale.
            if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_unstales.add(share.hash)
388         
        def compute(state, payout_script):
            # Build a getwork job for a worker: construct the generate
            # (coinbase) transaction, assemble the transaction list, and
            # return a BlockAttempt the miner can grind on.
            if payout_script is None:
                payout_script = my_script
            # On persistent networks, refuse to hand out work while not yet
            # synced/connected, and always refuse if bitcoind looks dead.
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            if len(p2p_node.peers) == 0 and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
            if time.time() > current_work2.value['last_update'] + 60:
                raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
            
            # XXX assuming generate_tx is smallish here..
            def get_stale_frac():
                # Encode our observed stale fraction as a doubled uint16 so
                # read_stale_frac (below in main) can recover and validate it
                # from the share nonce; empty string when we have no shares.
                shares, stale_shares = get_share_counts()
                if shares == 0:
                    return ""
                frac = stale_shares/shares
                return 2*struct.pack('<H', int(65535*frac + .5))
            subsidy = current_work2.value['subsidy']
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                subsidy=subsidy,
                # nonce: 8-byte run identifier + 8 random bytes + optional
                # 4-byte stale-fraction marker (20 bytes total when present)
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)) + get_stale_frac(),
                block_target=state['target'],
                net=args.net,
            )
            print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], (generate_tx['tx_outs'][-1]['value']-subsidy//200)*1e-8, args.net.BITCOIN_SYMBOL, subsidy*1e-8, args.net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + list(current_work2.value['transactions'])
            # Remember the tx set keyed by merkle root so got_response can
            # reassemble the full block from the returned header.
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            # Use bitcoind's clock, and never go below the median-time-past
            # of the last 11 shares + 1.
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            # target2 = the (easier) share target; record issue time by nonce
            # so got_response can report share age.
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
433         
434         my_shares = set()
435         times = {}
436         
        def got_response(data, user):
            # Handle a getwork solution submitted by a worker. Returns True
            # if the share was accepted and built on the current best chain,
            # False otherwise (workers may use this as accept/reject).
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                # If it meets the bitcoin target (or always, in DEBUG mode),
                # submit the full block to bitcoind.
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                    if hash_ <= block['header']['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                # Check against the (easier) share target from the coinbase.
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
471         
472         web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
473         
474         def get_rate():
475             if current_work.value['best_share_hash'] is not None:
476                 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
477                 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
478                 return json.dumps(att_s)
479             return json.dumps(None)
480         
        def get_users():
            # JSON object mapping each payout script (human-readable form) to
            # its fraction of the total payout weight over up to 720 shares.
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
            return json.dumps(res)
488         
489         class WebInterface(resource.Resource):
490             def __init__(self, func, mime_type):
491                 self.func, self.mime_type = func, mime_type
492             
493             def render_GET(self, request):
494                 request.setHeader('Content-Type', self.mime_type)
495                 return self.func()
496         
497         web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
498         web_root.putChild('users', WebInterface(get_users, 'application/json'))
499         if args.charts:
500             web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
501         
502         reactor.listenTCP(args.worker_port, server.Site(web_root))
503         
504         print '    ...success!'
505         print
506         
507         # done!
508         
509         # do new getwork when a block is heard on the p2p interface
510         
        def new_block(block_hash):
            # A new bitcoin block was heard on the P2P connection: wake the
            # work1_thread loop so it re-polls bitcoind immediately.
            work_updated.happened()
513         factory.new_block.watch(new_block)
514         
515         print 'Started successfully!'
516         print
517         
518         ht.updated.watch(set_real_work2)
519         
        @defer.inlineCallbacks
        def work1_thread():
            # Background loop: refresh work from bitcoind either when
            # work_updated fires (new block heard) or after a random 1-10s
            # delay, whichever comes first. Errors are logged, not fatal.
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
529         
        @defer.inlineCallbacks
        def work2_thread():
            # Background loop: periodically (mean 20s) re-evaluate the best
            # share chain even without external triggers.
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
538         
539         work1_thread()
540         work2_thread()
541         
542         
543         if hasattr(signal, 'SIGALRM'):
            def watchdog_handler(signum, frame):
                # SIGALRM fires if the reactor stalls for 30s (the LoopingCall
                # below re-arms the alarm every second while healthy); dump a
                # stack trace to show where the process is stuck.
                print 'Watchdog timer went off at:'
                traceback.print_stack()
547             
548             signal.signal(signal.SIGALRM, watchdog_handler)
549             task.LoopingCall(signal.alarm, 30).start(1)
550         
551         
552         def read_stale_frac(share):
553             if len(share.nonce) != 20:
554                 return None
555             a, b = struct.unpack("<HH", share.nonce[-4:])
556             if a != b:
557                 return None
558             return a/65535
559         
560         while True:
561             yield deferral.sleep(3)
562             try:
563                 if time.time() > current_work2.value['last_update'] + 60:
564                     print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
565                 if current_work.value['best_share_hash'] is not None:
566                     height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
567                     if height > 2:
568                         att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
569                         weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**100)
570                         shares, stale_shares = get_share_counts()
571                         print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
572                             math.format(att_s),
573                             height,
574                             len(tracker.verified.shares),
575                             len(tracker.shares),
576                             weights.get(my_script, 0)/total_weight*100,
577                             math.format(weights.get(my_script, 0)/total_weight*att_s),
578                             shares,
579                             stale_shares,
580                             len(p2p_node.peers),
581                         ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
582                         fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
583                         if fracs:
584                             med = math.median(fracs)
585                             print 'Median stale proportion:', med
586                             if shares:
587                                 print '    Own:', stale_shares/shares
588                                 if med < .99:
589                                     print '    Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
590                             
591                             
592             except:
593                 log.err()
594     except:
595         log.err(None, 'Fatal error:')
596     finally:
597         reactor.stop()
598
599 def run():
    class FixedArgumentParser(argparse.ArgumentParser):
        """ArgumentParser whose @file argument expansion recurses: arguments
        read from a file may themselves reference further files, and each
        line may contain multiple arguments (see convert_arg_line_to_args
        assigned below)."""
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            # recurse so files can reference other files
                            # (this is the fix over the stock argparse method)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
629     
630     parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
631     parser.convert_arg_line_to_args = lambda arg_line: (arg for arg in arg_line.split() if arg.strip())
632     parser.add_argument('--version', action='version', version=p2pool_init.__version__)
633     parser.add_argument('--net',
634         help='use specified network (default: bitcoin)',
635         action='store', choices=sorted(x for x in p2pool.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
636     parser.add_argument('--testnet',
637         help='''use the network's testnet''',
638         action='store_const', const=True, default=False, dest='testnet')
639     parser.add_argument('--debug',
640         help='debugging mode',
641         action='store_const', const=True, default=False, dest='debug')
642     parser.add_argument('-a', '--address',
643         help='generate to this address (defaults to requesting one from bitcoind)',
644         type=str, action='store', default=None, dest='address')
645     parser.add_argument('--charts',
646         help='generate charts on the web interface (requires PIL and pygame)',
647         action='store_const', const=True, default=False, dest='charts')
648     parser.add_argument('--logfile',
649         help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
650         type=str, action='store', default=None, dest='logfile')
651     
652     p2pool_group = parser.add_argument_group('p2pool interface')
653     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
654         help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
655         type=int, action='store', default=None, dest='p2pool_port')
656     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
657         help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
658         type=str, action='append', default=[], dest='p2pool_nodes')
659     parser.add_argument('-l', '--low-bandwidth',
660         help='trade lower bandwidth usage for higher latency (reduced efficiency)',
661         action='store_true', default=False, dest='low_bandwidth')
662     parser.add_argument('--disable-upnp',
663         help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
664         action='store_false', default=True, dest='upnp')
665     
666     worker_group = parser.add_argument_group('worker interface')
667     worker_group.add_argument('-w', '--worker-port', metavar='PORT',
668         help='listen on PORT for RPC connections from miners asking for work and providing responses (default: bitcoin: 9332 namecoin: 9331 ixcoin: 9330 i0coin: 9329, +10000 for testnets)',
669         type=int, action='store', default=None, dest='worker_port')
670     
671     bitcoind_group = parser.add_argument_group('bitcoind interface')
672     bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
673         help='connect to a bitcoind at this address (default: 127.0.0.1)',
674         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
675     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
676         help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332, 8338 for ixcoin)',
677         type=int, action='store', default=None, dest='bitcoind_rpc_port')
678     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
679         help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
680         type=int, action='store', default=None, dest='bitcoind_p2p_port')
681     
682     bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
683         help='bitcoind RPC interface username (default: empty)',
684         type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
685     bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
686         help='bitcoind RPC interface password',
687         type=str, action='store', dest='bitcoind_rpc_password')
688     
689     args = parser.parse_args()
690     
691     if args.debug:
692         p2pool_init.DEBUG = True
693     
694     if args.logfile is None:
695        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), args.net_name + ('_testnet' if args.testnet else '') + '.log')
696     
    class LogFile(object):
        """Append-only log file that can be reopened at any time (for rotation)
        and truncates itself to roughly the last megabyte, at a line boundary,
        once it grows past 100 MB."""
        def __init__(self, filename):
            self.filename = filename
            # current append-mode handle; None only until the first reopen()
            self.inner_file = None
            self.reopen()
        def reopen(self):
            # drop the old handle first so the OS file can be safely rewritten
            if self.inner_file is not None:
                self.inner_file.close()
            # touch the file so the 'rb' open below cannot fail on a fresh start
            open(self.filename, 'a').close()
            f = open(self.filename, 'rb')
            f.seek(0, os.SEEK_END)
            length = f.tell()
            if length > 100*1000*1000:
                # keep only the final ~1 MB: jump back, then skip forward to the
                # start of the next complete line ('' means EOF was hit first)
                f.seek(-1000*1000, os.SEEK_END)
                while True:
                    if f.read(1) in ('', '\n'):
                        break
                data = f.read()
                f.close()
                # rewrite the file so it contains just the retained tail
                f = open(self.filename, 'wb')
                f.write(data)
            f.close()
            self.inner_file = open(self.filename, 'a')
        def write(self, data):
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
724     class TeePipe(object):
725         def __init__(self, outputs):
726             self.outputs = outputs
727         def write(self, data):
728             for output in self.outputs:
729                 output.write(data)
730         def flush(self):
731             for output in self.outputs:
732                 output.flush()
733     class TimestampingPipe(object):
734         def __init__(self, inner_file):
735             self.inner_file = inner_file
736             self.buf = ''
737             self.softspace = 0
738         def write(self, data):
739             buf = self.buf + data
740             lines = buf.split('\n')
741             for line in lines[:-1]:
742                 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
743                 self.inner_file.flush()
744             self.buf = lines[-1]
745         def flush(self):
746             pass
    logfile = LogFile(args.logfile)
    # mirror everything printed -- including Twisted's log output -- to both the
    # real stderr and the log file, timestamping each line
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    if hasattr(signal, "SIGUSR1"):
        # close and reopen the log file on SIGUSR1 (useful with external log
        # rotation; guarded because e.g. Windows lacks SIGUSR1)
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    # also reopen every 5 seconds so LogFile's 100 MB size cap gets enforced
    task.LoopingCall(logfile.reopen).start(5)
    
    # pick the concrete network definition; --testnet selects its testnet twin
    args.net = p2pool.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    # fill in per-network defaults for any ports not given on the command line
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = args.net.BITCOIN_RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = args.net.WORKER_PORT
    
    if args.address is not None:
        # validate the payout address up front so a typo fails fast
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        # no address given; one will be requested from bitcoind (per --address help)
        args.pubkey_hash = None
    
    # start the node once the reactor is up; reactor.run() blocks until shutdown
    reactor.callWhenRunning(main, args)
    reactor.run()