Replaced usage of Python's ConfigParser with a handwritten config file parser
[p2pool.git] / p2pool / main.py
1 from __future__ import division
2
3 import base64
4 import json
5 import os
6 import random
7 import sys
8 import time
9 import signal
10 import traceback
11 import urlparse
12
13 if '--iocp' in sys.argv:
14     from twisted.internet import iocpreactor
15     iocpreactor.install()
16 from twisted.internet import defer, reactor, protocol, task
17 from twisted.web import server
18 from twisted.python import log
19 from nattraverso import portmapper, ipdiscover
20
21 import bitcoin.p2p as bitcoin_p2p, bitcoin.data as bitcoin_data
22 from bitcoin import worker_interface, height_tracker
23 from util import fixargparse, jsonrpc, variable, deferral, math, logging
24 from . import p2p, networks, web, work
25 import p2pool, p2pool.data as p2pool_data
26
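# getwork() prefers the newer getblocktemplate RPC and falls back to the legacy
# getmemorypool call when bitcoind answers -32601 (method not found), flipping
# use_getblocktemplate so the working method is reused on later polls. Either
# response is normalized into one dict (version, previous_block, transactions,
# merkle_link, subsidy, time, bits, height, ...) for the rest of the code.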
27 @deferral.retry('Error getting work from bitcoind:', 3)
28 @defer.inlineCallbacks
29 def getwork(bitcoind, use_getblocktemplate=False):
30     def go():
31         if use_getblocktemplate:
32             return bitcoind.rpc_getblocktemplate(dict(mode='template'))
33         else:
34             return bitcoind.rpc_getmemorypool()
35     try:
36         work = yield go()
37     except jsonrpc.Error_for_code(-32601): # Method not found
38         use_getblocktemplate = not use_getblocktemplate
39         try:
40             work = yield go()
41         except jsonrpc.Error_for_code(-32601): # Method not found
42             print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
43             raise deferral.RetrySilentlyException()
44     packed_transactions = [(x['data'] if isinstance(x, dict) else x).decode('hex') for x in work['transactions']]
45     if 'height' not in work:
46         work['height'] = (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1
47     elif p2pool.DEBUG:
48         assert work['height'] == (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1
49     defer.returnValue(dict(
50         version=work['version'],
51         previous_block=int(work['previousblockhash'], 16),
52         transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
53         merkle_link=bitcoin_data.calculate_merkle_link([None] + map(bitcoin_data.hash256, packed_transactions), 0),
54         subsidy=work['coinbasevalue'],
55         time=work['time'] if 'time' in work else work['curtime'],
56         bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
57         coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
58         height=work['height'],
59         last_update=time.time(),
60         use_getblocktemplate=use_getblocktemplate,
61     ))
62
63 @defer.inlineCallbacks
64 def main(args, net, datadir_path, merged_urls, worker_endpoint):
65     try:
66         print 'p2pool (version %s)' % (p2pool.__version__,)
67         print
68         
69         traffic_happened = variable.Event()
70         
71         # connect to bitcoind over JSON-RPC and do initial getmemorypool
72         url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
73         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
74         bitcoind = jsonrpc.Proxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
75         @deferral.retry('Error while checking Bitcoin connection:', 1)
76         @defer.inlineCallbacks
77         def check():
78             if not (yield net.PARENT.RPC_CHECK(bitcoind)):
79                 print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
80                 raise deferral.RetrySilentlyException()
81             if not net.VERSION_CHECK((yield bitcoind.rpc_getinfo())['version']):
82                 print >>sys.stderr, '    Bitcoin version too old! Upgrade to 0.6.4 or newer!'
83                 raise deferral.RetrySilentlyException()
84         yield check()
85         temp_work = yield getwork(bitcoind)
86         
87         block_height_var = variable.Variable(None)
88         @defer.inlineCallbacks
89         def poll_height():
90             block_height_var.set((yield deferral.retry('Error while calling getblockcount:')(bitcoind.rpc_getblockcount)()))
91         yield poll_height()
92         task.LoopingCall(poll_height).start(60*60)
93         
94         bitcoind_warning_var = variable.Variable(None)
95         @defer.inlineCallbacks
96         def poll_warnings():
97             errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
98             bitcoind_warning_var.set(errors if errors != '' else None)
99         yield poll_warnings()
100         task.LoopingCall(poll_warnings).start(20*60)
101         
102         print '    ...success!'
103         print '    Current block hash: %x' % (temp_work['previous_block'],)
104         print '    Current block height: %i' % (block_height_var.value,)
105         print
106         
107         # connect to bitcoind over bitcoin-p2p
108         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
109         factory = bitcoin_p2p.ClientFactory(net.PARENT)
110         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
111         yield factory.getProtocol() # waits until handshake is successful
112         print '    ...success!'
113         print
114         
115         print 'Determining payout address...'
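        # If no -a/--address was given, reuse the address cached in
        # <datadir>/cached_payout_address, ask bitcoind whether it is still a
        # valid address controlled by the local wallet, and otherwise fetch the
        # address of bitcoind's 'p2pool' account before re-caching it.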
116         if args.pubkey_hash is None:
117             address_path = os.path.join(datadir_path, 'cached_payout_address')
118             
119             if os.path.exists(address_path):
120                 with open(address_path, 'rb') as f:
121                     address = f.read().strip('\r\n')
122                 print '    Loaded cached address: %s...' % (address,)
123             else:
124                 address = None
125             
126             if address is not None:
127                 res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
128                 if not res['isvalid'] or not res['ismine']:
129                     print '    Cached address is either invalid or not controlled by local bitcoind!'
130                     address = None
131             
132             if address is None:
133                 print '    Getting payout address from bitcoind...'
134                 address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
135             
136             with open(address_path, 'wb') as f:
137                 f.write(address)
138             
139             my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
140         else:
141             my_pubkey_hash = args.pubkey_hash
142         print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
143         print
144         
145         my_share_hashes = set()
146         my_doa_share_hashes = set()
147         
148         tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
149         shared_share_hashes = set()
150         ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
151         known_verified = set()
152         print "Loading shares..."
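        # Replay shares from the on-disk ShareStore into the OkayTracker (with
        # time_seen zeroed), then promote the hashes recorded as verified,
        # forgetting any that are no longer present; the watchers below keep the
        # store pruned as the tracker drops shares.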
153         for i, (mode, contents) in enumerate(ss.get_shares()):
154             if mode == 'share':
155                 if contents.hash in tracker.items:
156                     continue
157                 shared_share_hashes.add(contents.hash)
158                 contents.time_seen = 0
159                 tracker.add(contents)
160                 if len(tracker.items) % 1000 == 0 and tracker.items:
161                     print "    %i" % (len(tracker.items),)
162             elif mode == 'verified_hash':
163                 known_verified.add(contents)
164             else:
165                 raise AssertionError()
166         print "    ...inserting %i verified shares..." % (len(known_verified),)
167         for h in known_verified:
168             if h not in tracker.items:
169                 ss.forget_verified_share(h)
170                 continue
171             tracker.verified.add(tracker.items[h])
172         print "    ...done loading %i shares!" % (len(tracker.items),)
173         print
174         tracker.removed.watch(lambda share: ss.forget_share(share.hash))
175         tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
176         tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
177         
178         print 'Initializing work...'
179         
180         
181         # BITCOIND WORK
182         
183         bitcoind_work = variable.Variable((yield getwork(bitcoind)))
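        # work_poller() refreshes bitcoind_work whenever bitcoind announces a new
        # block over P2P (factory.new_block) or after at most 15 seconds,
        # whichever comes first.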
184         @defer.inlineCallbacks
185         def work_poller():
186             while True:
187                 flag = factory.new_block.get_deferred()
188                 try:
189                     bitcoind_work.set((yield getwork(bitcoind, bitcoind_work.value['use_getblocktemplate'])))
190                 except:
191                     log.err()
192                 yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
193         work_poller()
194         
195         # PEER WORK
196         
197         best_block_header = variable.Variable(None)
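        # handle_header() ignores headers that fail the current network target;
        # otherwise it replaces best_block_header when nothing is stored yet, when
        # the new header is a child of bitcoind's best block and the stored header
        # *is* that best block, or when the new header *is* the best block and the
        # stored header is not a child of it.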
198         def handle_header(new_header):
199             # check that header matches current target
200             if not (net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header)) <= bitcoind_work.value['bits'].target):
201                 return
202             bitcoind_best_block = bitcoind_work.value['previous_block']
203             if (best_block_header.value is None
204                 or (
205                     new_header['previous_block'] == bitcoind_best_block and
206                     bitcoin_data.hash256(bitcoin_data.block_header_type.pack(best_block_header.value)) == bitcoind_best_block
207                 ) # new is child of current and previous is current
208                 or (
209                     bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and
210                     best_block_header.value['previous_block'] != bitcoind_best_block
211                 )): # new is current and previous is not a child of current
212                 best_block_header.set(new_header)
213         @defer.inlineCallbacks
214         def poll_header():
215             handle_header((yield factory.conn.value.get_block_header(bitcoind_work.value['previous_block'])))
216         bitcoind_work.changed.watch(lambda _: poll_header())
217         yield deferral.retry('Error while requesting best block header:')(poll_header)()
218         
219         # BEST SHARE
220         
221         get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, lambda: bitcoind_work.value['previous_block'], net)
222         
223         best_share_var = variable.Variable(None)
224         desired_var = variable.Variable(None)
225         def set_best_share():
226             best, desired = tracker.think(get_height_rel_highest, bitcoind_work.value['previous_block'], bitcoind_work.value['bits'])
227             
228             best_share_var.set(best)
229             desired_var.set(desired)
230         bitcoind_work.changed.watch(lambda _: set_best_share())
231         set_best_share()
232         
233         print '    ...success!'
234         print
235         
236         # setup p2p logic and join p2pool network
237         
238         class Node(p2p.Node):
239             def handle_shares(self, shares, peer):
240                 if len(shares) > 5:
241                     print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
242                 
243                 new_count = 0
244                 for share in shares:
245                     if share.hash in tracker.items:
246                         #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
247                         continue
248                     
249                     new_count += 1
250                     
251                     #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
252                     
253                     tracker.add(share)
254                 
255                 if new_count:
256                     set_best_share()
257                 
258                 if len(shares) > 5:
259                     print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.items), 2*net.CHAIN_LENGTH)
260             
261             @defer.inlineCallbacks
262             def handle_share_hashes(self, hashes, peer):
263                 new_hashes = [x for x in hashes if x not in tracker.items]
264                 if not new_hashes:
265                     return
266                 try:
267                     shares = yield peer.get_shares(
268                         hashes=new_hashes,
269                         parents=0,
270                         stops=[],
271                     )
272                 except:
273                     log.err(None, 'in handle_share_hashes:')
274                 else:
275                     self.handle_shares(shares, peer)
276             
277             def handle_get_shares(self, hashes, parents, stops, peer):
278                 parents = min(parents, 1000//len(hashes))
279                 stops = set(stops)
280                 shares = []
281                 for share_hash in hashes:
282                     for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
283                         if share.hash in stops:
284                             break
285                         shares.append(share)
286                 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
287                 return shares
288             
289             def handle_bestblock(self, header, peer):
290                 if net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) > header['bits'].target:
291                     raise p2p.PeerMisbehavingError('received block header fails PoW test')
292                 handle_header(header)
293         
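        # Found blocks are submitted redundantly: over the bitcoind P2P connection
        # and via JSON-RPC (submitblock when getblocktemplate is in use, otherwise
        # getmemorypool with the hex-encoded block). Results that contradict the
        # proof-of-work expectation are reported to stderr.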
294         @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
295         def submit_block_p2p(block):
296             if factory.conn.value is None:
297                 print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header'])))
298                 raise deferral.RetrySilentlyException()
299             factory.conn.value.send_block(block=block)
300         
301         @deferral.retry('Error submitting block: (will retry)', 10, 10)
302         @defer.inlineCallbacks
303         def submit_block_rpc(block, ignore_failure):
304             if bitcoind_work.value['use_getblocktemplate']:
305                 result = yield bitcoind.rpc_submitblock(bitcoin_data.block_type.pack(block).encode('hex'))
306                 success = result is None
307             else:
308                 result = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex'))
309                 success = result
310             success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target
311             if (not success and success_expected and not ignore_failure) or (success and not success_expected):
312                 print >>sys.stderr, 'Block submittal result: %s (%r) Expected: %s' % (success, result, success_expected)
313         
314         def submit_block(block, ignore_failure):
315             submit_block_p2p(block)
316             submit_block_rpc(block, ignore_failure)
317         
318         @tracker.verified.added.watch
319         def _(share):
320             if share.pow_hash <= share.header['bits'].target:
321                 submit_block(share.as_block(tracker), ignore_failure=True)
322                 print
323                 print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
324                 print
325                 def spread():
326                     if (get_height_rel_highest(share.header['previous_block']) > -5 or
327                         bitcoind_work.value['previous_block'] in [share.header['previous_block'], share.header_hash]):
328                         broadcast_share(share.hash)
329                 spread()
330                 reactor.callLater(5, spread) # so get_height_rel_highest can update
331         
332         print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
333         
334         @defer.inlineCallbacks
335         def parse(x):
336             if ':' in x:
337                 ip, port = x.split(':')
338                 defer.returnValue(((yield reactor.resolve(ip)), int(port)))
339             else:
340                 defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
341         
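        # Known peer addresses are persisted as JSON in <datadir>/addrs and merged
        # with the network's BOOTSTRAP_ADDRS, resolved through parse() above
        # (which defaults the port to net.P2P_PORT when none is given).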
342         addrs = {}
343         if os.path.exists(os.path.join(datadir_path, 'addrs')):
344             try:
345                 with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
346                     addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
347             except:
348                 print >>sys.stderr, 'error parsing addrs'
349         for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
350             try:
351                 addr = yield addr_df
352                 if addr not in addrs:
353                     addrs[addr] = (0, time.time(), time.time())
354             except:
355                 log.err()
356         
357         connect_addrs = set()
358         for addr_df in map(parse, args.p2pool_nodes):
359             try:
360                 connect_addrs.add((yield addr_df))
361             except:
362                 log.err()
363         
364         p2p_node = Node(
365             best_share_hash_func=lambda: best_share_var.value,
366             port=args.p2pool_port,
367             net=net,
368             addr_store=addrs,
369             connect_addrs=connect_addrs,
370             max_incoming_conns=args.p2pool_conns,
371             traffic_happened=traffic_happened,
372         )
373         p2p_node.start()
374         
375         def save_addrs():
376             with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
377                 f.write(json.dumps(p2p_node.addr_store.items()))
378         task.LoopingCall(save_addrs).start(60)
379         
380         @best_block_header.changed.watch
381         def _(header):
382             for peer in p2p_node.peers.itervalues():
383                 peer.send_bestblock(header=header)
384         
385         @defer.inlineCallbacks
386         def broadcast_share(share_hash):
387             shares = []
388             for share in tracker.get_chain(share_hash, min(5, tracker.get_height(share_hash))):
389                 if share.hash in shared_share_hashes:
390                     break
391                 shared_share_hashes.add(share.hash)
392                 shares.append(share)
393             
394             for peer in list(p2p_node.peers.itervalues()):
395                 yield peer.sendShares([share for share in shares if share.peer is not peer])
396         
397         # send share when the chain changes to their chain
398         best_share_var.changed.watch(broadcast_share)
399         
400         def save_shares():
401             for share in tracker.get_chain(best_share_var.value, min(tracker.get_height(best_share_var.value), 2*net.CHAIN_LENGTH)):
402                 ss.add_share(share)
403                 if share.hash in tracker.verified.items:
404                     ss.add_verified_hash(share.hash)
405         task.LoopingCall(save_shares).start(60)
406         
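        # download_shares() loops forever: whenever the tracker wants missing
        # parent shares (desired_var), it asks a randomly chosen connected peer
        # for one missing share plus up to 500 of its parents and feeds whatever
        # comes back through handle_shares().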
407         @apply
408         @defer.inlineCallbacks
409         def download_shares():
410             while True:
411                 desired = yield desired_var.get_when_satisfies(lambda val: len(val) != 0)
412                 peer2, share_hash = random.choice(desired)
413                 
414                 if len(p2p_node.peers) == 0:
415                     yield deferral.sleep(1)
416                     continue
417                 peer = random.choice(p2p_node.peers.values())
418                 
419                 print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
420                 try:
421                     shares = yield peer.get_shares(
422                         hashes=[share_hash],
423                         parents=500,
424                         stops=[],
425                     )
426                 except:
427                     log.err(None, 'in download_shares:')
428                     continue
429                 
430                 if not shares:
431                     yield deferral.sleep(1) # sleep so we don't keep rerequesting the same share nobody has
432                     continue
433                 p2p_node.handle_shares(shares, peer)
434         
435         print '    ...success!'
436         print
437         
438         if args.upnp:
439             @defer.inlineCallbacks
440             def upnp_thread():
441                 while True:
442                     try:
443                         is_lan, lan_ip = yield ipdiscover.get_local_ip()
444                         if is_lan:
445                             pm = yield portmapper.get_port_mapper()
446                             yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
447                     except defer.TimeoutError:
448                         pass
449                     except:
450                         if p2pool.DEBUG:
451                             log.err(None, 'UPnP error:')
452                     yield deferral.sleep(random.expovariate(1/120))
453             upnp_thread()
454         
455         # start listening for workers with a JSON-RPC server
456         
457         print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
458         
459         get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, best_share_var.value, bitcoind_work.value['bits'].target, bitcoind_work.value['subsidy'], net)
460         
461         wb = work.WorkerBridge(my_pubkey_hash, net, args.donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, args.worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var)
462         web_root = web.get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net, wb.get_stale_counts, my_pubkey_hash, wb.local_rate_monitor, args.worker_fee, p2p_node, wb.my_share_hashes, wb.pseudoshare_received, wb.share_received, best_share_var, bitcoind_warning_var, traffic_happened)
463         worker_interface.WorkerInterface(wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
464         
465         deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
466         
467         with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
468             pass
469         
470         print '    ...success!'
471         print
472         
473         
474         # done!
475         print 'Started successfully!'
476         print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
477         if args.donation_percentage > 0.51:
478             print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
479         elif args.donation_percentage < 0.49:
480             print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
481         else:
482             print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
483             print 'You can increase this amount with the --give-author argument! (or decrease it, if you must)'
484         print
485         
486         
487         if hasattr(signal, 'SIGALRM'):
488             signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
489                 sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
490             ))
491             signal.siginterrupt(signal.SIGALRM, False)
492             task.LoopingCall(signal.alarm, 30).start(1)
493         
494         if args.irc_announce:
495             from twisted.words.protocols import irc
496             class IRCClient(irc.IRCClient):
497                 nickname = 'p2pool%02i' % (random.randrange(100),)
498                 channel = net.ANNOUNCE_CHANNEL
499                 def lineReceived(self, line):
500                     if p2pool.DEBUG:
501                         print repr(line)
502                     irc.IRCClient.lineReceived(self, line)
503                 def signedOn(self):
504                     irc.IRCClient.signedOn(self)
505                     self.factory.resetDelay()
506                     self.join(self.channel)
507                     @defer.inlineCallbacks
508                     def new_share(share):
509                         if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
510                             yield deferral.sleep(random.expovariate(1/60))
511                             message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
512                             if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
513                                 self.say(self.channel, message)
514                                 self._remember_message(message)
515                     self.watch_id = tracker.verified.added.watch(new_share)
516                     self.recent_messages = []
517                 def _remember_message(self, message):
518                     self.recent_messages.append(message)
519                     while len(self.recent_messages) > 100:
520                         self.recent_messages.pop(0)
521                 def privmsg(self, user, channel, message):
522                     if channel == self.channel:
523                         self._remember_message(message)
524                 def connectionLost(self, reason):
525                     tracker.verified.added.unwatch(self.watch_id)
526                     print 'IRC connection lost:', reason.getErrorMessage()
527             class IRCClientFactory(protocol.ReconnectingClientFactory):
528                 protocol = IRCClient
529             reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
530         
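        # status_thread() builds a multi-line status summary (chain length, peers,
        # local/pool hash rates, stale rates, expected time to share and to block,
        # current payout) every 3 seconds, printing it when it changes or at least
        # every 15 seconds, and echoes any chain warnings to stderr.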
531         @defer.inlineCallbacks
532         def status_thread():
533             last_str = None
534             last_time = 0
535             while True:
536                 yield deferral.sleep(3)
537                 try:
538                     height = tracker.get_height(best_share_var.value)
539                     this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
540                         height,
541                         len(tracker.verified.items),
542                         len(tracker.items),
543                         len(p2p_node.peers),
544                         sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
545                     ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
546                     
547                     datums, dt = wb.local_rate_monitor.get_datums_in_last()
548                     my_att_s = sum(datum['work']/dt for datum in datums)
549                     this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
550                         math.format(int(my_att_s)),
551                         math.format_dt(dt),
552                         math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
553                         math.format_dt(2**256 / tracker.items[best_share_var.value].max_target / my_att_s) if my_att_s and best_share_var.value else '???',
554                     )
555                     
556                     if height > 2:
557                         (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
558                         stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
559                         real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
560                         
561                         this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
562                             shares, stale_orphan_shares, stale_doa_shares,
563                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
564                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
565                             get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
566                         )
567                         this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
568                             math.format(int(real_att_s)),
569                             100*stale_prop,
570                             math.format_dt(2**256 / bitcoind_work.value['bits'].target / real_att_s),
571                         )
572                         
573                         for warning in p2pool_data.get_warnings(tracker, best_share_var.value, net, bitcoind_warning_var.value, bitcoind_work.value):
574                             print >>sys.stderr, '#'*40
575                             print >>sys.stderr, '>>> Warning: ' + warning
576                             print >>sys.stderr, '#'*40
577                     
578                     if this_str != last_str or time.time() > last_time + 15:
579                         print this_str
580                         last_str = this_str
581                         last_time = time.time()
582                 except:
583                     log.err()
584         status_thread()
585     except:
586         reactor.stop()
587         log.err(None, 'Fatal error:')
588
589 def run():
590     realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
591     
592     parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
593     parser.add_argument('--version', action='version', version=p2pool.__version__)
594     parser.add_argument('--net',
595         help='use specified network (default: bitcoin)',
596         action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
597     parser.add_argument('--testnet',
598         help='''use the network's testnet''',
599         action='store_const', const=True, default=False, dest='testnet')
600     parser.add_argument('--debug',
601         help='enable debugging mode',
602         action='store_const', const=True, default=False, dest='debug')
603     parser.add_argument('-a', '--address',
604         help='generate payouts to this address (default: <address requested from bitcoind>)',
605         type=str, action='store', default=None, dest='address')
606     parser.add_argument('--datadir',
607         help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
608         type=str, action='store', default=None, dest='datadir')
609     parser.add_argument('--logfile',
610         help='''log to this file (default: data/<NET>/log)''',
611         type=str, action='store', default=None, dest='logfile')
612     parser.add_argument('--merged',
613         help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
614         type=str, action='append', default=[], dest='merged_urls')
615     parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
616         help='donate this percentage of work towards the development of p2pool (default: 0.5)',
617         type=float, action='store', default=0.5, dest='donation_percentage')
618     parser.add_argument('--iocp',
619         help='use the Windows IOCP API to avoid errors caused by a large number of open sockets',
620         action='store_true', default=False, dest='iocp')
621     parser.add_argument('--irc-announce',
622         help='announce any blocks found on irc://irc.freenode.net/#p2pool',
623         action='store_true', default=False, dest='irc_announce')
624     parser.add_argument('--no-bugreport',
625         help='disable submitting caught exceptions to the author',
626         action='store_true', default=False, dest='no_bugreport')
627     
628     p2pool_group = parser.add_argument_group('p2pool interface')
629     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
630         help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
631         type=int, action='store', default=None, dest='p2pool_port')
632     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
633         help='connect to an existing p2pool node at ADDR listening on port PORT (PORT defaults to the p2pool P2P port) in addition to the builtin addresses',
634         type=str, action='append', default=[], dest='p2pool_nodes')
635     parser.add_argument('--disable-upnp',
636         help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
637         action='store_false', default=True, dest='upnp')
638     p2pool_group.add_argument('--max-conns', metavar='CONNS',
639         help='maximum incoming connections (default: 40)',
640         type=int, action='store', default=40, dest='p2pool_conns')
641     
642     worker_group = parser.add_argument_group('worker interface')
643     worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
644         help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
645         type=str, action='store', default=None, dest='worker_endpoint')
646     worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
647         help='''charge workers who mine to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee for mining on your p2pool instance. The fee is displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
648         type=float, action='store', default=0, dest='worker_fee')
649     
650     bitcoind_group = parser.add_argument_group('bitcoind interface')
651     bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
652         help='connect to this address (default: 127.0.0.1)',
653         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
654     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
655         help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
656         type=int, action='store', default=None, dest='bitcoind_rpc_port')
657     bitcoind_group.add_argument('--bitcoind-rpc-ssl',
658         help='connect to JSON-RPC interface using SSL',
659         action='store_true', default=False, dest='bitcoind_rpc_ssl')
660     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
661         help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
662         type=int, action='store', default=None, dest='bitcoind_p2p_port')
663     
664     bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
665         help='bitcoind RPC interface username, then password, space-separated (if only one is provided it is used as the password and the username defaults to empty; if neither is provided, P2Pool reads them from bitcoin.conf)',
666         type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
667     
668     args = parser.parse_args()
669     
670     if args.debug:
671         p2pool.DEBUG = True
672         defer.setDebugging(True)
673     
674     net_name = args.net_name + ('_testnet' if args.testnet else '')
675     net = networks.nets[net_name]
676     
677     datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
678     if not os.path.exists(datadir_path):
679         os.makedirs(datadir_path)
680     
681     if len(args.bitcoind_rpc_userpass) > 2:
682         parser.error('a maximum of two arguments are allowed')
683     args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
684     
685     if args.bitcoind_rpc_password is None:
686         conf_path = net.PARENT.CONF_FILE_FUNC()
687         if not os.path.exists(conf_path):
688             parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
689                 '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
690                 '''\r\n'''
691                 '''server=1\r\n'''
692                 '''rpcpassword=%x\r\n'''
693                 '''\r\n'''
694                 '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
695         conf = open(conf_path, 'rb').read()
696         contents = {}
697         for line in conf.splitlines(True):
698             if '#' in line:
699                 line = line[:line.index('#')]
700             if '=' not in line:
701                 continue
702             k, v = line.split('=', 1)
703             contents[k.strip()] = v.strip()
704         for conf_name, var_name, var_type in [
705             ('rpcuser', 'bitcoind_rpc_username', str),
706             ('rpcpassword', 'bitcoind_rpc_password', str),
707             ('rpcport', 'bitcoind_rpc_port', int),
708             ('port', 'bitcoind_p2p_port', int),
709         ]:
710             if getattr(args, var_name) is None and conf_name in contents:
711                 setattr(args, var_name, var_type(contents[conf_name]))
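        # The hand-rolled parsing above is the replacement for the ConfigParser-based
        # reader this commit removes. A rough sketch of that older approach
        # (illustrative only, not the removed code) would have had to fake a section
        # header, since bitcoin.conf has none and ConfigParser requires one:
        #
        #     import ConfigParser, StringIO
        #     cp = ConfigParser.RawConfigParser()
        #     cp.readfp(StringIO.StringIO('[x]\n' + conf))
        #     contents = dict(cp.items('x'))
        #
        # The handwritten loop keeps keys case-sensitive, tolerates inline '#'
        # comments and skips lines without an '='.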
712         if args.bitcoind_rpc_password is None:
713             parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
714     
715     if args.bitcoind_rpc_username is None:
716         args.bitcoind_rpc_username = ''
717     
718     if args.bitcoind_rpc_port is None:
719         args.bitcoind_rpc_port = net.PARENT.RPC_PORT
720     
721     if args.bitcoind_p2p_port is None:
722         args.bitcoind_p2p_port = net.PARENT.P2P_PORT
723     
724     if args.p2pool_port is None:
725         args.p2pool_port = net.P2P_PORT
726     
727     if args.worker_endpoint is None:
728         worker_endpoint = '', net.WORKER_PORT
729     elif ':' not in args.worker_endpoint:
730         worker_endpoint = '', int(args.worker_endpoint)
731     else:
732         addr, port = args.worker_endpoint.rsplit(':', 1)
733         worker_endpoint = addr, int(port)
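    # Examples: '-w 9332' listens on all interfaces at port 9332 -> ('', 9332);
    # '-w 127.0.0.1:9332' -> ('127.0.0.1', 9332); with no -w at all, the
    # network's default WORKER_PORT is used on all interfaces.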
734     
735     if args.address is not None:
736         try:
737             args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
738         except Exception, e:
739             parser.error('error parsing address: ' + repr(e))
740     else:
741         args.pubkey_hash = None
742     
743     def separate_url(url):
744         s = urlparse.urlsplit(url)
745         if '@' not in s.netloc:
746             parser.error('merged url netloc must contain an "@"')
747         userpass, new_netloc = s.netloc.rsplit('@', 1)
748         return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
749     merged_urls = map(separate_url, args.merged_urls)
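    # e.g. '--merged http://ncuser:ncpass@127.0.0.1:10332/' is split by
    # separate_url() into ('http://127.0.0.1:10332/', 'ncuser:ncpass').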
750     
751     if args.logfile is None:
752         args.logfile = os.path.join(datadir_path, 'log')
753     
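    # stdout and stderr are routed through a timestamping tee pipe that writes to
    # both the original stderr and the logfile; SIGUSR1 (where available) and a
    # 5-second LoopingCall reopen the file, so it can be rotated externally.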
754     logfile = logging.LogFile(args.logfile)
755     pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
756     sys.stdout = logging.AbortPipe(pipe)
757     sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
758     if hasattr(signal, "SIGUSR1"):
759         def sigusr1(signum, frame):
760             print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
761             logfile.reopen()
762             print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
763         signal.signal(signal.SIGUSR1, sigusr1)
764     task.LoopingCall(logfile.reopen).start(5)
765     
766     class ErrorReporter(object):
767         def __init__(self):
768             self.last_sent = None
769         
770         def emit(self, eventDict):
771             if not eventDict["isError"]:
772                 return
773             
774             if self.last_sent is not None and time.time() < self.last_sent + 5:
775                 return
776             self.last_sent = time.time()
777             
778             if 'failure' in eventDict:
779                 text = ((eventDict.get('why') or 'Unhandled Error')
780                     + '\n' + eventDict['failure'].getTraceback())
781             else:
782                 text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
783             
784             from twisted.web import client
785             client.getPage(
786                 url='http://u.forre.st/p2pool_error.cgi',
787                 method='POST',
788                 postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
789                 timeout=15,
790             ).addBoth(lambda x: None)
791     if not args.no_bugreport:
792         log.addObserver(ErrorReporter().emit)
793     
794     reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
795     reactor.run()