don't give out work if not connected to peers
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
28
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch current work and block height from bitcoind over JSON-RPC.

    Fires both RPCs before yielding so they run concurrently; note that a
    block could still arrive in between the two queries. Retried up to 3
    times by the deferral.retry decorator.

    Returns a deferred firing with (bitcoin.getwork.BlockAttempt, height).
    """
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    try:
        work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
        height = yield height_df
    finally:
        # Attach no-op errbacks so a failure in either deferred isn't
        # reported as an unhandled error after we bail out.
        work_df.addErrback(lambda fail: None)
        height_df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
41
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind (over bitcoin-p2p) for a payout script via checkorder.

    Returns a deferred firing with the script on success, None if the IP
    transaction was denied, and raising ValueError on any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
52
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build a payout script from bitcoind's 'p2pool' account address.

    Fallback path used when the checkorder/IP-transaction route is denied.
    Returns a deferred firing with the script bytes.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
57
@defer.inlineCallbacks
def main(args):
    """Run the p2pool node.

    Connects to bitcoind (JSON-RPC and bitcoin-p2p), loads cached headers and
    shares, joins the p2pool P2P network, serves work to miners over a local
    JSON-RPC interface, and then loops forever printing pool status. On any
    fatal error the Twisted reactor is stopped.

    args: argparse.Namespace from run() — supplies network settings, ports,
    credentials, and feature flags.
    """
    try:
        if args.charts:
            from . import draw
        
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        temp_work, temp_height = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        if args.pubkey_hash is None:
            if my_script is None:
                # checkorder/IP transaction denied: ask for an account address instead
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            # user explicitly requested a payout address via -a/--address
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        print 'Loading cached block headers...'
        ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
        print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
        print
        
        # load previously-seen shares and verified-share hashes from disk
        tracker = p2pool.OkayTracker(args.net)
        ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                contents.shared = True
                contents.stored = True
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        # persist new shares/verifications as they arrive
        tracker.added.watch(ss.add_share)
        tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        # share_hash -> (last_request_time, request_count); entries expire after 300s
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            # Poll bitcoind for new work; when the previous block changed,
            # also recompute the best share via set_real_work2().
            work, height = yield getwork(bitcoind)
            changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                clock_offset=time.time() - work.timestamp,
            ))
            if changed:
                set_real_work2()
        
        def set_real_work2():
            # Recompute the best share head and request any desired (missing
            # parent) shares from peers, with per-hash exponential backoff.
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                # back off: skip if a request went out recently (window grows 1.5x per attempt)
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                # prefer peers that announced a head descending from this share
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        print
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            # Broadcast a share to all peers except its origin, then mark it shared.
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            # Handle shares received from the p2pool network (or from our own
            # worker via got_response, with peer=None).
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                some_new = True
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                # a share that also satisfies the bitcoin target is a full block
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            if shares and peer is not None:
                # remember which peer knows of this head, for parent requests later
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        def p2p_share_hashes(share_hashes, peer):
            # Handle share-hash announcements: request unknown hashes,
            # honoring the same per-hash request backoff as set_real_work2.
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            # Serve a peer's getshares request: walk each requested chain up to
            # `parents` ancestors (capped so total stays near 1000), stopping at
            # any hash in `stops`.
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            # "host" or "host:port" -> (host, port) with the network default port
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # built-in bootstrap nodes, plus DNS-resolved ones below
        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            # Periodically (re-)register a UPnP port mapping for the p2pool port.
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if not is_lan:
                        continue
                    pm = yield portmapper.get_port_mapper()
                    yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
                except:
                    if p2pool_init.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        # merkle_root -> transactions for work we handed out; expires after 300s
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # random per-run tag embedded in share nonces, used to recognize our own shares
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        def compute(state, payout_script):
            # Build a getwork BlockAttempt for a miner from the current state.
            # Refuses (jsonrpc.Error) while still downloading shares or when not
            # connected to any peers, if the network requires persistence.
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            if len(p2p_node.peers) == 0 and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            # greedily pack extra transactions up to ~500kB of serialized size
            extra_txs = []
            size = 0
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                # block subsidy (halving every 210000 blocks) plus fees of the included txs
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            # use bitcoind-relative time, nudged forward past the median of the
            # last 11 share timestamps if necessary
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        my_shares = set()   # hashes of shares produced by this node's workers
        times = {}          # share nonce -> time work was handed out (for age display)
        
        def got_response(data):
            # Handle a getwork solution submitted by a worker. Returns True for
            # an accepted, current share, False otherwise.
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                    if hash_ <= block['header']['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Received invalid share from worker - %x/%x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        def get_rate():
            # JSON pool rate (attempts/s over up to 720 shares), or null if no chain yet
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
                return json.dumps(att_s)
            return json.dumps(None)
        
        def get_users():
            # JSON mapping of payout address -> fraction of recent pool weight
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            # Tiny twisted.web resource that serves func() with a fixed MIME type.
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        if args.charts:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        class Tx(object):
            # Wraps a candidate mempool transaction, tracking its in/out values
            # and whether all of its parents are confirmed in blocks.
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                # Asynchronously fetch each input's parent tx; accumulate value_in
                # and set parents_all_in_blocks only if every parent is in a block.
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined in this file — presumably
                # provided elsewhere; confirm before relying on this path.
                x = self.is_good2()
                #print 'is_good:', x
                return x
        
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            # Fetch an announced transaction from bitcoind and add it to tx_pool.
            try:
                assert isinstance(tx_hash, (int, long))
                #print 'REQUESTING', tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print 'GOT', tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                log.err(None, 'Error handling tx:')
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            # a new bitcoin block wakes up work1_thread to refresh work immediately
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def work1_thread():
            # Poll bitcoind for work; woken early by work_updated, otherwise
            # sleeps a random exponential interval (mean 20s).
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            # Periodically recompute the best share (mean 20s interval).
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
        
        def watchdog_handler(signum, frame):
            # SIGALRM fires if the reactor stalls for 30s (alarm is re-armed
            # every second below); dumps a traceback for debugging.
            print "Watchdog timer went off at:"
            traceback.print_exc()
        
        signal.signal(signal.SIGALRM, watchdog_handler)
        task.LoopingCall(signal.alarm, 30).start(1)
        
        
        counter = skiplists.CountsSkipList(tracker, run_identifier)
        
        # main status loop: print pool statistics every ~3 seconds
        while True:
            yield deferral.sleep(3)
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        matching_in_chain = counter(current_work.value['best_share_hash'], height)
                        shares_in_chain = my_shares & matching_in_chain
                        stale_shares = my_shares - matching_in_chain
                        print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                            math.format(att_s),
                            height,
                            len(tracker.verified.shares),
                            len(tracker.shares),
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            len(shares_in_chain) + len(stale_shares),
                            len(stale_shares),
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                        #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
                        #for k, v in weights.iteritems():
                        #    print k.encode('hex'), v/total_weight
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()
593
def run():
    """Entry point: parse command-line options, optionally enable debug
    logging, resolve network-dependent defaults, and start the Twisted
    reactor with main() scheduled.

    Side effects: in --debug mode this rebinds sys.stdout, sys.stderr and
    Twisted's log observer to a timestamping tee that also writes to
    'debug.log', and installs a SIGUSR1 handler (where available) that
    reopens the log file. Blocks in reactor.run() until the reactor stops.
    """
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--namecoin',
        help='use namecoin instead of bitcoin',
        action='store_const', const=True, default=False, dest='namecoin')
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')
    
    # Options for the p2pool peer-to-peer network itself.
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    # NOTE(review): the next two are added to the top-level parser, not
    # p2pool_group, so they appear under the general options in --help.
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    # Options for the local miner-facing (getwork RPC) interface.
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')
    
    # Options for talking to the upstream bitcoind (RPC + raw p2p).
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # Positional (required) RPC credentials — no leading dashes, so argparse
    # treats them as mandatory positional arguments.
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
        # File-like wrapper that remembers its open() arguments so the
        # underlying file can be closed and reopened (for log rotation).
        class ReopeningFile(object):
            def __init__(self, *open_args, **open_kwargs):
                self.open_args, self.open_kwargs = open_args, open_kwargs
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def reopen(self):
                # Close and reopen with the original arguments; with mode 'w'
                # (as used below) this truncates/starts a fresh file.
                self.inner_file.close()
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def write(self, data):
                self.inner_file.write(data)
            def flush(self):
                self.inner_file.flush()
        # File-like object that duplicates every write to several outputs
        # (like the Unix 'tee' command).
        class TeePipe(object):
            def __init__(self, outputs):
                self.outputs = outputs
            def write(self, data):
                for output in self.outputs:
                    output.write(data)
            def flush(self):
                for output in self.outputs:
                    output.flush()
        # File-like object that prefixes each complete line with a
        # HH:MM:SS.microseconds timestamp before passing it on. Partial
        # lines are buffered until their newline arrives.
        class TimestampingPipe(object):
            def __init__(self, inner_file):
                self.inner_file = inner_file
                self.buf = ''  # holds the trailing unterminated line, if any
                self.softspace = 0  # attribute consulted by Python 2's print statement
            def write(self, data):
                buf = self.buf + data
                lines = buf.split('\n')
                # All elements but the last are complete lines; timestamp and
                # flush each one so output appears promptly.
                for line in lines[:-1]:
                    self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                    self.inner_file.flush()
                self.buf = lines[-1]
            def flush(self):
                # Intentionally a no-op: write() already flushes per line.
                pass
        # 'debug.log' lives next to the script itself, not in the CWD.
        logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
        # Route normal output, errors, and Twisted log messages through one
        # timestamping tee that writes to both the real stderr and the logfile.
        sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
        # SIGUSR1 reopens the logfile so external log rotation is possible
        # (guarded: the signal doesn't exist on all platforms, e.g. Windows).
        if hasattr(signal, "SIGUSR1"):
            def sigusr1(signum, frame):
                print '''Caught SIGUSR1, closing 'debug.log'...'''
                logfile.reopen()
                print '''...and reopened 'debug.log' after catching SIGUSR1.'''
            signal.signal(signal.SIGUSR1, sigusr1)
    
    # Pick the network parameters from the (namecoin?, testnet?) flag pair.
    args.net = {
        (False, False): p2pool.Mainnet,
        (False, True): p2pool.Testnet,
        (True, False): p2pool.NamecoinMainnet,
        (True, True): p2pool.NamecoinTestnet,
    }[args.namecoin, args.testnet]
    
    # Fill in network-dependent port defaults that couldn't be set statically.
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    # Validate the payout address early and fail fast with a clear error;
    # None means main() should request an address from bitcoind instead.
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    # Schedule main() once the reactor is up, then block in the event loop.
    reactor.callWhenRunning(main, args)
    reactor.run()