check that rpc interface connects to the right bitcoin/namecoin mainnet/testnet insta...
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
28
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current block template and chain height from bitcoind.

    Fires the getwork and getblocknumber RPCs concurrently; a block could
    arrive in between the two replies, so callers must tolerate slight skew.
    Returns (BlockAttempt, height).
    """
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    try:
        work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
        height = yield height_df
    finally:
        # attach no-op errbacks so any leftover failure isn't logged as unhandled
        work_df.addErrback(lambda fail: None)
        height_df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
41
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Request a payout script from bitcoind via a null checkorder message.

    Returns the script on success, None if the IP transaction was denied,
    and raises ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    response = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = response['reply']
    if reply == 'success':
        defer.returnValue(response['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (response,))
52
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fall back to paying a wallet address from bitcoind's 'p2pool' account."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
57
58 @defer.inlineCallbacks
59 def main(args):
60     try:
61         if args.charts:
62             from . import draw
63         
64         print 'p2pool (version %s)' % (p2pool_init.__version__,)
65         print
66         
67         # connect to bitcoind over JSON-RPC and do initial getwork
68         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
69         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
70         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
71         good = yield args.net.BITCOIN_RPC_CHECK(bitcoind)
72         if not good:
73             print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
74             return
75         temp_work, temp_height = yield getwork(bitcoind)
76         print '    ...success!'
77         print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
78         print
79         
80         # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
81         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
82         factory = bitcoin.p2p.ClientFactory(args.net)
83         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
84         my_script = yield get_payout_script(factory)
85         if args.pubkey_hash is None:
86             if my_script is None:
87                 print '    IP transaction denied ... falling back to sending to address.'
88                 my_script = yield get_payout_script2(bitcoind, args.net)
89         else:
90             my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
91         print '    ...success!'
92         print '    Payout script:', my_script.encode('hex')
93         print
94         
95         print 'Loading cached block headers...'
96         ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
97         print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
98         print
99         
100         tracker = p2pool.OkayTracker(args.net)
101         ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
102         known_verified = set()
103         print "Loading shares..."
104         for i, (mode, contents) in enumerate(ss.get_shares()):
105             if mode == 'share':
106                 if contents.hash in tracker.shares:
107                     continue
108                 contents.shared = True
109                 contents.stored = True
110                 tracker.add(contents)
111                 if len(tracker.shares) % 1000 == 0 and tracker.shares:
112                     print "    %i" % (len(tracker.shares),)
113             elif mode == 'verified_hash':
114                 known_verified.add(contents)
115             else:
116                 raise AssertionError()
117         print "    ...inserting %i verified shares..." % (len(known_verified),)
118         for h in known_verified:
119             if h not in tracker.shares:
120                 continue
121             tracker.verified.add(tracker.shares[h])
122         print "    ...done loading %i shares!" % (len(tracker.shares),)
123         print
124         tracker.added.watch(ss.add_share)
125         tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
126         
127         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
128         
129         # information affecting work that should trigger a long-polling update
130         current_work = variable.Variable(None)
131         # information affecting work that should not trigger a long-polling update
132         current_work2 = variable.Variable(None)
133         
134         work_updated = variable.Event()
135         
136         requested = expiring_dict.ExpiringDict(300)
137         
138         @defer.inlineCallbacks
139         def set_real_work1():
140             work, height = yield getwork(bitcoind)
141             changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
142             current_work.set(dict(
143                 version=work.version,
144                 previous_block=work.previous_block,
145                 target=work.target,
146                 height=height,
147                 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
148             ))
149             current_work2.set(dict(
150                 clock_offset=time.time() - work.timestamp,
151             ))
152             if changed:
153                 set_real_work2()
154         
155         def set_real_work2():
156             best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
157             
158             t = dict(current_work.value)
159             t['best_share_hash'] = best
160             current_work.set(t)
161             
162             t = time.time()
163             for peer2, share_hash in desired:
164                 if share_hash not in tracker.tails: # was received in the time tracker.think was running
165                     continue
166                 last_request_time, count = requested.get(share_hash, (None, 0))
167                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
168                     continue
169                 potential_peers = set()
170                 for head in tracker.tails[share_hash]:
171                     potential_peers.update(peer_heads.get(head, set()))
172                 potential_peers = [peer for peer in potential_peers if peer.connected2]
173                 if count == 0 and peer2 is not None and peer2.connected2:
174                     peer = peer2
175                 else:
176                     peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
177                     if peer is None:
178                         continue
179                 
180                 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
181                 peer.send_getshares(
182                     hashes=[share_hash],
183                     parents=2000,
184                     stops=list(set(tracker.heads) | set(
185                         tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
186                     ))[:100],
187                 )
188                 requested[share_hash] = t, count + 1
189         
190         print 'Initializing work...'
191         yield set_real_work1()
192         set_real_work2()
193         print '    ...success!'
194         print
195         
196         start_time = time.time() - current_work2.value['clock_offset']
197         
198         # setup p2p logic and join p2pool network
199         
200         def share_share(share, ignore_peer=None):
201             for peer in p2p_node.peers.itervalues():
202                 if peer is ignore_peer:
203                     continue
204                 #if p2pool_init.DEBUG:
205                 #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
206                 peer.send_shares([share])
207             share.flag_shared()
208         
209         def p2p_shares(shares, peer=None):
210             if len(shares) > 5:
211                 print 'Processing %i shares...' % (len(shares),)
212             
213             some_new = False
214             for share in shares:
215                 if share.hash in tracker.shares:
216                     #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
217                     continue
218                 some_new = True
219                 
220                 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
221                 
222                 tracker.add(share)
223                 #for peer2, share_hash in desired:
224                 #    print 'Requesting parent share %x' % (share_hash,)
225                 #    peer2.send_getshares(hashes=[share_hash], parents=2000)
226                 
227                 if share.bitcoin_hash <= share.header['target']:
228                     print
229                     print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
230                     print
231                     if factory.conn.value is not None:
232                         factory.conn.value.send_block(block=share.as_block(tracker, args.net))
233                     else:
234                         print 'No bitcoind connection! Erp!'
235             
236             if shares and peer is not None:
237                 peer_heads.setdefault(shares[0].hash, set()).add(peer)
238             
239             if some_new:
240                 set_real_work2()
241             
242             if len(shares) > 5:
243                 print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
244         
245         def p2p_share_hashes(share_hashes, peer):
246             t = time.time()
247             get_hashes = []
248             for share_hash in share_hashes:
249                 if share_hash in tracker.shares:
250                     continue
251                 last_request_time, count = requested.get(share_hash, (None, 0))
252                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
253                     continue
254                 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
255                 get_hashes.append(share_hash)
256                 requested[share_hash] = t, count + 1
257             
258             if share_hashes and peer is not None:
259                 peer_heads.setdefault(share_hashes[0], set()).add(peer)
260             if get_hashes:
261                 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
262         
263         def p2p_get_shares(share_hashes, parents, stops, peer):
264             parents = min(parents, 1000//len(share_hashes))
265             stops = set(stops)
266             shares = []
267             for share_hash in share_hashes:
268                 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
269                     if share.hash in stops:
270                         break
271                     shares.append(share)
272             print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
273             peer.send_shares(shares, full=True)
274         
275         print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
276         
277         def parse(x):
278             if ':' in x:
279                 ip, port = x.split(':')
280                 return ip, int(port)
281             else:
282                 return x, args.net.P2P_PORT
283         
284         nodes = set([
285             ('72.14.191.28', args.net.P2P_PORT),
286             ('62.204.197.159', args.net.P2P_PORT),
287             ('142.58.248.28', args.net.P2P_PORT),
288             ('94.23.34.145', args.net.P2P_PORT),
289         ])
290         for host in [
291             'p2pool.forre.st',
292             'dabuttonfactory.com',
293         ]:
294             try:
295                 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
296             except:
297                 log.err(None, 'Error resolving bootstrap node IP:')
298         
299         p2p_node = p2p.Node(
300             current_work=current_work,
301             port=args.p2pool_port,
302             net=args.net,
303             addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
304             mode=0 if args.low_bandwidth else 1,
305             preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
306         )
307         p2p_node.handle_shares = p2p_shares
308         p2p_node.handle_share_hashes = p2p_share_hashes
309         p2p_node.handle_get_shares = p2p_get_shares
310         
311         p2p_node.start()
312         
313         # send share when the chain changes to their chain
314         def work_changed(new_work):
315             #print 'Work changed:', new_work
316             for share in tracker.get_chain_known(new_work['best_share_hash']):
317                 if share.shared:
318                     break
319                 share_share(share, share.peer)
320         current_work.changed.watch(work_changed)
321         
322         print '    ...success!'
323         print
324         
325         @defer.inlineCallbacks
326         def upnp_thread():
327             while True:
328                 try:
329                     is_lan, lan_ip = yield ipdiscover.get_local_ip()
330                     if not is_lan:
331                         continue
332                     pm = yield portmapper.get_port_mapper()
333                     yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
334                 except:
335                     if p2pool_init.DEBUG:
336                         log.err(None, "UPnP error:")
337                 yield deferral.sleep(random.expovariate(1/120))
338         
339         if args.upnp:
340             upnp_thread()
341         
342         # start listening for workers with a JSON-RPC server
343         
344         print 'Listening for workers on port %i...' % (args.worker_port,)
345         
346         # setup worker logic
347         
348         merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
349         run_identifier = struct.pack('<Q', random.randrange(2**64))
350         
351         def compute(state, payout_script):
352             if payout_script is None:
353                 payout_script = my_script
354             if state['best_share_hash'] is None and args.net.PERSIST:
355                 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
356             if len(p2p_node.peers) == 0 and args.net.PERSIST:
357                 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
358             pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
359             pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
360             extra_txs = []
361             size = 0
362             for tx in pre_extra_txs:
363                 this_size = len(bitcoin.data.tx_type.pack(tx.tx))
364                 if size + this_size > 500000:
365                     break
366                 extra_txs.append(tx)
367                 size += this_size
368             # XXX check sigops!
369             # XXX assuming generate_tx is smallish here..
370             generate_tx = p2pool.generate_transaction(
371                 tracker=tracker,
372                 previous_share_hash=state['best_share_hash'],
373                 new_script=payout_script,
374                 subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
375                 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
376                 block_target=state['target'],
377                 net=args.net,
378             )
379             print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
380             #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
381             #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
382             transactions = [generate_tx] + [tx.tx for tx in extra_txs]
383             merkle_root = bitcoin.data.merkle_hash(transactions)
384             merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
385             
386             timestamp = int(time.time() - current_work2.value['clock_offset'])
387             if state['best_share_hash'] is not None:
388                 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
389                 if timestamp2 > timestamp:
390                     print 'Toff', timestamp2 - timestamp
391                     timestamp = timestamp2
392             target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
393             times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
394             #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
395             return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
396         
397         my_shares = set()
398         times = {}
399         
400         def got_response(data):
401             try:
402                 # match up with transactions
403                 header = bitcoin.getwork.decode_data(data)
404                 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
405                 if transactions is None:
406                     print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
407                     return False
408                 block = dict(header=header, txs=transactions)
409                 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
410                 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
411                     if factory.conn.value is not None:
412                         factory.conn.value.send_block(block=block)
413                     else:
414                         print 'No bitcoind connection! Erp!'
415                     if hash_ <= block['header']['target']:
416                         print
417                         print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
418                         print
419                 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
420                 if hash_ > target:
421                     print 'Received invalid share from worker - %x/%x' % (hash_, target)
422                     return False
423                 share = p2pool.Share.from_block(block)
424                 my_shares.add(share.hash)
425                 print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
426                 good = share.previous_hash == current_work.value['best_share_hash']
427                 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
428                 p2p_shares([share])
429                 # eg. good = share.hash == current_work.value['best_share_hash'] here
430                 return good
431             except:
432                 log.err(None, 'Error processing data received from worker:')
433                 return False
434         
435         web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
436         
437         def get_rate():
438             if current_work.value['best_share_hash'] is not None:
439                 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
440                 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
441                 return json.dumps(att_s)
442             return json.dumps(None)
443         
444         def get_users():
445             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
446             weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
447             res = {}
448             for script in sorted(weights, key=lambda s: weights[s]):
449                 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
450             return json.dumps(res)
451         
452         class WebInterface(resource.Resource):
453             def __init__(self, func, mime_type):
454                 self.func, self.mime_type = func, mime_type
455             
456             def render_GET(self, request):
457                 request.setHeader('Content-Type', self.mime_type)
458                 return self.func()
459         
460         web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
461         web_root.putChild('users', WebInterface(get_users, 'application/json'))
462         if args.charts:
463             web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
464         
465         reactor.listenTCP(args.worker_port, server.Site(web_root))
466         
467         print '    ...success!'
468         print
469         
470         # done!
471         
472         tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
473         get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
474         
475         class Tx(object):
476             def __init__(self, tx, seen_at_block):
477                 self.hash = bitcoin.data.tx_type.hash256(tx)
478                 self.tx = tx
479                 self.seen_at_block = seen_at_block
480                 self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
481                 #print
482                 #print '%x %r' % (seen_at_block, tx)
483                 #for mention in self.mentions:
484                 #    print '%x' % mention
485                 #print
486                 self.parents_all_in_blocks = False
487                 self.value_in = 0
488                 #print self.tx
489                 self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
490                 self._find_parents_in_blocks()
491             
492             @defer.inlineCallbacks
493             def _find_parents_in_blocks(self):
494                 for tx_in in self.tx['tx_ins']:
495                     try:
496                         raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
497                     except Exception:
498                         return
499                     self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
500                     #print raw_transaction
501                     if not raw_transaction['parent_blocks']:
502                         return
503                 self.parents_all_in_blocks = True
504             
505             def is_good(self):
506                 if not self.parents_all_in_blocks:
507                     return False
508                 x = self.is_good2()
509                 #print 'is_good:', x
510                 return x
511         
512         @defer.inlineCallbacks
513         def new_tx(tx_hash):
514             try:
515                 assert isinstance(tx_hash, (int, long))
516                 #print 'REQUESTING', tx_hash
517                 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
518                 #print 'GOT', tx
519                 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
520             except:
521                 log.err(None, 'Error handling tx:')
522         # disable for now, for testing impact on stales
523         #factory.new_tx.watch(new_tx)
524         
525         def new_block(block_hash):
526             work_updated.happened()
527         factory.new_block.watch(new_block)
528         
529         print 'Started successfully!'
530         print
531         
532         ht.updated.watch(set_real_work2)
533         
534         @defer.inlineCallbacks
535         def work1_thread():
536             while True:
537                 flag = work_updated.get_deferred()
538                 try:
539                     yield set_real_work1()
540                 except:
541                     log.err()
542                 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
543         
544         @defer.inlineCallbacks
545         def work2_thread():
546             while True:
547                 try:
548                     set_real_work2()
549                 except:
550                     log.err()
551                 yield deferral.sleep(random.expovariate(1/20))
552         
553         work1_thread()
554         work2_thread()
555         
556         
557         def watchdog_handler(signum, frame):
558             print "Watchdog timer went off at:"
559             traceback.print_exc()
560         
561         signal.signal(signal.SIGALRM, watchdog_handler)
562         task.LoopingCall(signal.alarm, 30).start(1)
563         
564         
565         counter = skiplists.CountsSkipList(tracker, run_identifier)
566         
567         while True:
568             yield deferral.sleep(3)
569             try:
570                 if current_work.value['best_share_hash'] is not None:
571                     height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
572                     if height > 2:
573                         att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
574                         weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
575                         matching_in_chain = counter(current_work.value['best_share_hash'], height)
576                         shares_in_chain = my_shares & matching_in_chain
577                         stale_shares = my_shares - matching_in_chain
578                         print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
579                             math.format(att_s),
580                             height,
581                             len(tracker.verified.shares),
582                             len(tracker.shares),
583                             weights.get(my_script, 0)/total_weight*100,
584                             math.format(weights.get(my_script, 0)/total_weight*att_s),
585                             len(shares_in_chain) + len(stale_shares),
586                             len(stale_shares),
587                             len(p2p_node.peers),
588                         ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
589                         #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
590                         #for k, v in weights.iteritems():
591                         #    print k.encode('hex'), v/total_weight
592             except:
593                 log.err()
594     except:
595         log.err(None, 'Fatal error:')
596     finally:
597         reactor.stop()
598
def run():
    """Entry point: parse command-line options, optionally set up debug
    logging, select the network parameters (bitcoin/namecoin crossed with
    mainnet/testnet), then schedule main() and start the Twisted reactor."""
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--namecoin',
        help='use namecoin instead of bitcoin',
        action='store_const', const=True, default=False, dest='namecoin')
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')
    
    # options controlling the p2pool peer-to-peer interface
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    # options controlling the miner-facing RPC interface
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')
    
    # options controlling how we reach the local bitcoind
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # positional arguments: declaration order fixes the order on the command line
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        # in debug mode, tee stdout/stderr (and Twisted's log output) to a
        # timestamped debug.log that can be reopened for external rotation
        p2pool_init.DEBUG = True
        class ReopeningFile(object):
            """File-like wrapper that can close and reopen its underlying
            file in place (so a log rotator can move the file aside)."""
            def __init__(self, *open_args, **open_kwargs):
                self.open_args, self.open_kwargs = open_args, open_kwargs
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def reopen(self):
                self.inner_file.close()
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def write(self, data):
                self.inner_file.write(data)
            def flush(self):
                self.inner_file.flush()
        class TeePipe(object):
            """File-like object that duplicates every write to several outputs."""
            def __init__(self, outputs):
                self.outputs = outputs
            def write(self, data):
                for output in self.outputs:
                    output.write(data)
            def flush(self):
                for output in self.outputs:
                    output.flush()
        class TimestampingPipe(object):
            """File-like object that prefixes each complete line with a
            HH:MM:SS.microseconds timestamp before forwarding it."""
            def __init__(self, inner_file):
                self.inner_file = inner_file
                self.buf = ''  # trailing partial line carried over between writes
                self.softspace = 0  # required by the Python 2 print-statement file protocol
            def write(self, data):
                buf = self.buf + data
                lines = buf.split('\n')
                # emit only complete lines; keep the unterminated remainder buffered
                for line in lines[:-1]:
                    self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                    self.inner_file.flush()
                self.buf = lines[-1]
            def flush(self):
                # deliberate no-op: each complete line is flushed as it is written
                pass
        logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
        # NOTE: the right-hand side is evaluated before the assignments take
        # effect, so TeePipe captures the *original* sys.stderr (avoiding a
        # self-referential write loop)
        sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
        if hasattr(signal, "SIGUSR1"):
            # SIGUSR1 (where available) reopens debug.log so it can be rotated
            def sigusr1(signum, frame):
                print '''Caught SIGUSR1, closing 'debug.log'...'''
                logfile.reopen()
                print '''...and reopened 'debug.log' after catching SIGUSR1.'''
            signal.signal(signal.SIGUSR1, sigusr1)
    
    # map the (namecoin, testnet) flag pair to the matching network definition
    args.net = {
        (False, False): p2pool.Mainnet,
        (False, True): p2pool.Testnet,
        (True, False): p2pool.NamecoinMainnet,
        (True, True): p2pool.NamecoinTestnet,
    }[args.namecoin, args.testnet]
    
    # fill in port defaults from the chosen network's constants
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.address is not None:
        try:
            # validate the payout address against the selected network
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        # None means main() will request a payout address from bitcoind instead
        args.pubkey_hash = None
    
    reactor.callWhenRunning(main, args)
    reactor.run()