1 from __future__ import division
15 if '--iocp' in sys.argv:
16 from twisted.internet import iocpreactor
18 from twisted.internet import defer, reactor, protocol, task
19 from twisted.web import server
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p as bitcoin_p2p, bitcoin.data as bitcoin_data
24 from bitcoin import worker_interface, height_tracker
25 from util import expiring_dict, fixargparse, jsonrpc, variable, deferral, math, logging
26 from . import p2p, networks, web, work
27 import p2pool, p2pool.data as p2pool_data
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch a block template from bitcoind via getmemorypool and normalize it.

    Returns (via defer.returnValue) a dict with the unpacked transactions,
    merkle link (coinbase slot left as None), subsidy, target bits, height and
    timing info used by the work generator.

    Retries silently (after printing a hint) when bitcoind is too old to
    support getmemorypool.
    """
    # restored: the `try:` line was missing from this listing, leaving the
    # `except` orphaned
    try:
        work = yield bitcoind.rpc_getmemorypool()
    except jsonrpc.Error_for_code(-32601): # Method not found
        print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
        raise deferral.RetrySilentlyException()
    packed_transactions = [x.decode('hex') for x in work['transactions']]
    # older bitcoinds don't report height; derive it from the previous block
    if 'height' not in work:
        work['height'] = (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1
    defer.returnValue(dict(
        version=work['version'],
        previous_block=int(work['previousblockhash'], 16),
        transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
        merkle_link=bitcoin_data.calculate_merkle_link([None] + map(bitcoin_data.hash256, packed_transactions), 0),
        subsidy=work['coinbasevalue'],
        time=work['time'], # restored: clock_offset below derives from this same field
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
        height=work['height'],
        clock_offset=time.time() - work['time'],
        last_update=time.time(),
    )) # restored: closing parens were missing from this listing
@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    # NOTE(review): this listing appears to be missing source lines throughout
    # this function — e.g. the `try:` that the trailing
    # `log.err(None, 'Fatal error:')` pairs with, and several inner `def`
    # headers whose decorated bodies appear below. Indentation follows the
    # apparent intended structure; restore the dropped lines before running.
        print 'p2pool (version %s)' % (p2pool.__version__,)
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
        # sanity-check the RPC connection and bitcoind version, retried once per second
        @deferral.retry('Error while checking Bitcoin connection:', 1)
        @defer.inlineCallbacks
        # NOTE(review): a `def check():` header appears to have been dropped here
            if not (yield net.PARENT.RPC_CHECK(bitcoind)):
                print >>sys.stderr, " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
                raise deferral.RetrySilentlyException()
            temp_work = yield getwork(bitcoind)
            if not net.VERSION_CHECK((yield bitcoind.rpc_getinfo())['version'], temp_work):
                print >>sys.stderr, ' Bitcoin version too old! BIP16 support required! Upgrade to 0.6.0rc4 or greater!'
                raise deferral.RetrySilentlyException()
            defer.returnValue(temp_work)
        temp_work = yield check()
        block_height_var = variable.Variable(None)
        @defer.inlineCallbacks
        # NOTE(review): a `def poll_height():` header appears to have been dropped here
            block_height_var.set((yield deferral.retry('Error while calling getblockcount:')(bitcoind.rpc_getblockcount)()))
        task.LoopingCall(poll_height).start(60*60) # refresh block height hourly
        bitcoind_warning_var = variable.Variable(None)
        @defer.inlineCallbacks
        # NOTE(review): a `def poll_warnings():` header appears to have been dropped here
            errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
            bitcoind_warning_var.set(errors if errors != '' else None) # empty string means no warnings
        task.LoopingCall(poll_warnings).start(20*60) # refresh bitcoind warnings every 20 minutes
        print ' Current block hash: %x' % (temp_work['previous_block'],)
        print ' Current block height: %i' % (block_height_var.value,)
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print 'Determining payout address...'
        if args.pubkey_hash is None:
            # no address given on the command line: use (and cache) one from bitcoind
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print ' Loaded cached address: %s...' % (address,)
            if address is not None:
                # make sure the cached address is still controlled by the local wallet
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print ' Cached address is either invalid or not controlled by local bitcoind!'
            print ' Getting payout address from bitcoind...'
            address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            with open(address_path, 'wb') as f:
            # NOTE(review): the with-body writing `address` to the cache file appears to have been dropped here
            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        # NOTE(review): an `else:` appears to have been dropped here
            my_pubkey_hash = args.pubkey_hash
        print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
        my_share_hashes = set()
        my_doa_share_hashes = set()
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
                if contents.hash in tracker.items:
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0 # loaded from disk, so don't count as freshly seen
                tracker.add(contents)
                if len(tracker.items) % 1000 == 0 and tracker.items:
                    print " %i" % (len(tracker.items),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
                raise AssertionError()
        print " ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.items:
                ss.forget_verified_share(h) # verified hash with no matching share on disk
            tracker.verified.add(tracker.items[h])
        print " ...done loading %i shares!" % (len(tracker.items),)
        # keep the on-disk share store in sync with the in-memory tracker
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        print 'Initializing work...'
        bitcoind_work = variable.Variable((yield getwork(bitcoind)))
        @defer.inlineCallbacks
        # NOTE(review): a work-poller `def` header and its loop appear to have been dropped here
            flag = factory.new_block.get_deferred()
                bitcoind_work.set((yield getwork(bitcoind)))
            yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True) # repoll on new block or after 15s
        best_block_header = variable.Variable(None)
        def handle_header(new_header):
            # check that header matches current target
            if not (net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header)) <= bitcoind_work.value['bits'].target):
            bitcoind_best_block = bitcoind_work.value['previous_block']
            if (best_block_header.value is None
                    new_header['previous_block'] == bitcoind_best_block and
                    bitcoin_data.hash256(bitcoin_data.block_header_type.pack(best_block_header.value)) == bitcoind_best_block
                ) # new is child of current and previous is current
                    bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and
                    best_block_header.value['previous_block'] != bitcoind_best_block
                )): # new is current and previous is not a child of current
                best_block_header.set(new_header)
        @defer.inlineCallbacks
        # NOTE(review): a `def poll_header():` header appears to have been dropped here
            handle_header((yield factory.conn.value.get_block_header(bitcoind_work.value['previous_block'])))
        bitcoind_work.changed.watch(lambda _: poll_header())
        yield deferral.retry('Error while requesting best block header:')(poll_header)()
        get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, lambda: bitcoind_work.value['previous_block'], net)
        requested = expiring_dict.ExpiringDict(300)
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        best_share_var = variable.Variable(None)
        def set_best_share():
            best, desired = tracker.think(get_height_rel_highest, bitcoind_work.value['previous_block'], bitcoind_work.value['bits'])
            best_share_var.set(best)
            # request any desired (missing-ancestor) shares from peers likely to have them
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                requested[share_hash] = t, count + 1
        bitcoind_work.changed.watch(lambda _: set_best_share())
        # setup p2p logic and join p2pool network
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
                    if share.hash in tracker.items:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer) # remember which peer advertised this chain head
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.items), 2*net.CHAIN_LENGTH)
            def handle_share_hashes(self, hashes, peer):
                for share_hash in hashes:
                    if share_hash in tracker.items:
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            def handle_get_shares(self, hashes, parents, stops, peer):
                parents = min(parents, 1000//len(hashes)) # bound total response size
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
            def handle_bestblock(self, header, peer):
                if net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) > header['bits'].target:
                    raise p2p.PeerMisbehavingError('received block header fails PoW test')
                handle_header(header)
        @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
        def submit_block_p2p(block):
            # push a solved block to bitcoind over the P2P connection
            if factory.conn.value is None:
                print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%32x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header'])))
                raise deferral.RetrySilentlyException()
            factory.conn.value.send_block(block=block)
        @deferral.retry('Error submitting block: (will retry)', 10, 10)
        @defer.inlineCallbacks
        def submit_block_rpc(block, ignore_failure):
            # also submit over RPC; warn when the result disagrees with the PoW check
            success = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex'))
            success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target
            if (not success and success_expected and not ignore_failure) or (success and not success_expected):
                print >>sys.stderr, 'Block submittal result: %s Expected: %s' % (success, success_expected)
        def submit_block(block, ignore_failure):
            # submit via both channels for redundancy
            submit_block_p2p(block)
            submit_block_rpc(block, ignore_failure)
        @tracker.verified.added.watch
        # NOTE(review): the watched callback's `def` header appears to have been dropped here
            if share.pow_hash <= share.header['bits'].target:
                submit_block(share.as_block(tracker), ignore_failure=True)
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                    if (get_height_rel_highest(share.header['previous_block']) > -5 or
                        bitcoind_work.value['previous_block'] in [share.header['previous_block'], share.header_hash]):
                        broadcast_share(share.hash)
                reactor.callLater(5, spread) # so get_height_rel_highest can update
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        @defer.inlineCallbacks
        # NOTE(review): a `def parse(x):` header (and its `if ':' in x:` test) appear to have been dropped here
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        if os.path.exists(os.path.join(datadir_path, 'addrs')):
                with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                    addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
                print >>sys.stderr, 'error parsing addrs'
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
                connect_addrs.add((yield addr_df))
            best_share_hash_func=lambda: best_share_var.value,
            port=args.p2pool_port,
            connect_addrs=connect_addrs,
            max_incoming_conns=args.p2pool_conns,
            with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
                f.write(json.dumps(p2p_node.addr_store.items()))
        task.LoopingCall(save_addrs).start(60) # persist known peer addresses every minute
        @best_block_header.changed.watch
        # NOTE(review): the watched callback's `def` header appears to have been dropped here
            for peer in p2p_node.peers.itervalues():
                peer.send_bestblock(header=header)
        @defer.inlineCallbacks
        def broadcast_share(share_hash):
            # send not-yet-shared recent ancestors of share_hash to every peer
            for share in tracker.get_chain(share_hash, min(5, tracker.get_height(share_hash))):
                if share.hash in shared_share_hashes:
                shared_share_hashes.add(share.hash)
            for peer in list(p2p_node.peers.itervalues()):
                yield peer.sendShares([share for share in shares if share.peer is not peer])
        # send share when the chain changes to their chain
        best_share_var.changed.watch(broadcast_share)
        # NOTE(review): a `def save_shares():` header appears to have been dropped here
            for share in tracker.get_chain(best_share_var.value, min(tracker.get_height(best_share_var.value), 2*net.CHAIN_LENGTH)):
                if share.hash in tracker.verified.items:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60) # persist recent shares every minute
        @defer.inlineCallbacks
        # NOTE(review): an `if args.upnp:` guard, the thread's `def` header, loop and `try:` appear to have been dropped around here
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                    except defer.TimeoutError:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1/120))
        # start listening for workers with a JSON-RPC server
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, best_share_var.value, bitcoind_work.value['bits'].target, bitcoind_work.value['subsidy'], net)
        wb = work.WorkerBridge(my_pubkey_hash, net, args.donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, args.worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var)
        web_root = web.get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net, wb.get_stale_counts, my_pubkey_hash, wb.local_rate_monitor, args.worker_fee, p2p_node, wb.my_share_hashes, wb.pseudoshare_received, wb.share_received, best_share_var, bitcoind_warning_var)
        worker_interface.WorkerInterface(wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
        deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
        with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
        # NOTE(review): the with-body appears to have been dropped here
        print 'Started successfully!'
        print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
        if args.donation_percentage > 0.51:
            print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
        elif args.donation_percentage < 0.49:
            print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
            print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
        print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
        if hasattr(signal, 'SIGALRM'):
            # watchdog: dump a stack trace to stderr if the alarm fires
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100),)
                channel = net.ANNOUNCE_CHANNEL
                def lineReceived(self, line):
                    irc.IRCClient.lineReceived(self, line)
                # NOTE(review): a `def signedOn(self):` header appears to have been dropped here
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)
                    @defer.inlineCallbacks
                    def new_share(share):
                        # announce freshly-found blocks, at most once per block hash
                        if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
                            yield deferral.sleep(random.expovariate(1/60))
                            message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                            if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
                                self.say(self.channel, message)
                                self._remember_message(message)
                    self.watch_id = tracker.verified.added.watch(new_share)
                    self.recent_messages = []
                def _remember_message(self, message):
                    self.recent_messages.append(message)
                    while len(self.recent_messages) > 100: # keep only the last 100 messages
                        self.recent_messages.pop(0)
                def privmsg(self, user, channel, message):
                    if channel == self.channel:
                        self._remember_message(message)
                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        @defer.inlineCallbacks
        # NOTE(review): a `def status_thread():` header, its loop setup and `try:` appear to have been dropped here
                yield deferral.sleep(3)
                    height = tracker.get_height(best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        len(tracker.verified.items),
                        sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work']/dt for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                        math.format_dt(2**256 / tracker.items[best_share_var.value].max_target / my_att_s) if my_att_s and best_share_var.value else '???',
                        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            math.format_dt(2**256 / bitcoind_work.value['bits'].target / real_att_s),
                    for warning in p2pool_data.get_warnings(tracker, best_share_var.value, net, bitcoind_warning_var.value, bitcoind_work.value):
                        print >>sys.stderr, '#'*40
                        print >>sys.stderr, '>>> Warning: ' + warning
                        print >>sys.stderr, '#'*40
                    if this_str != last_str or time.time() > last_time + 15:
                        last_time = time.time()
        log.err(None, 'Fatal error:')
580 realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
582 parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
583 parser.add_argument('--version', action='version', version=p2pool.__version__)
584 parser.add_argument('--net',
585 help='use specified network (default: bitcoin)',
586 action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
587 parser.add_argument('--testnet',
588 help='''use the network's testnet''',
589 action='store_const', const=True, default=False, dest='testnet')
590 parser.add_argument('--debug',
591 help='enable debugging mode',
592 action='store_const', const=True, default=False, dest='debug')
593 parser.add_argument('-a', '--address',
594 help='generate payouts to this address (default: <address requested from bitcoind>)',
595 type=str, action='store', default=None, dest='address')
596 parser.add_argument('--datadir',
597 help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
598 type=str, action='store', default=None, dest='datadir')
599 parser.add_argument('--logfile',
600 help='''log to this file (default: data/<NET>/log)''',
601 type=str, action='store', default=None, dest='logfile')
602 parser.add_argument('--merged',
603 help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
604 type=str, action='append', default=[], dest='merged_urls')
605 parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
606 help='donate this percentage of work towards the development of p2pool (default: 0.5)',
607 type=float, action='store', default=0.5, dest='donation_percentage')
608 parser.add_argument('--iocp',
609 help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
610 action='store_true', default=False, dest='iocp')
611 parser.add_argument('--irc-announce',
612 help='announce any blocks found on irc://irc.freenode.net/#p2pool',
613 action='store_true', default=False, dest='irc_announce')
614 parser.add_argument('--no-bugreport',
615 help='disable submitting caught exceptions to the author',
616 action='store_true', default=False, dest='no_bugreport')
618 p2pool_group = parser.add_argument_group('p2pool interface')
619 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
620 help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
621 type=int, action='store', default=None, dest='p2pool_port')
622 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
623 help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
624 type=str, action='append', default=[], dest='p2pool_nodes')
625 parser.add_argument('--disable-upnp',
626 help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
627 action='store_false', default=True, dest='upnp')
628 p2pool_group.add_argument('--max-conns', metavar='CONNS',
629 help='maximum incoming connections (default: 40)',
630 type=int, action='store', default=40, dest='p2pool_conns')
632 worker_group = parser.add_argument_group('worker interface')
633 worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
634 help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
635 type=str, action='store', default=None, dest='worker_endpoint')
636 worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
637 help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
638 type=float, action='store', default=0, dest='worker_fee')
640 bitcoind_group = parser.add_argument_group('bitcoind interface')
641 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
642 help='connect to this address (default: 127.0.0.1)',
643 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
644 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
645 help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
646 type=int, action='store', default=None, dest='bitcoind_rpc_port')
647 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
648 help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
649 type=int, action='store', default=None, dest='bitcoind_p2p_port')
651 bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
652 help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
653 type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    args = parser.parse_args()
    # NOTE(review): an `if args.debug:` guard appears to have been dropped here
        defer.setDebugging(True)
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    # per-network data directory, e.g. <script dir>/data/bitcoin
    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments are allowed')
    # one positional -> password only (username None); two -> username, password
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
    if args.bitcoind_rpc_password is None:
        # no password given: fall back to reading credentials/ports from bitcoin.conf
        conf_path = net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''rpcpassword=%x\r\n'''
                '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
        with open(conf_path, 'rb') as f:
            cp = ConfigParser.RawConfigParser()
            # prepend a dummy section header so ConfigParser accepts bitcoin.conf
            cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
        for conf_name, var_name, var_type in [
            ('rpcuser', 'bitcoind_rpc_username', str),
            ('rpcpassword', 'bitcoind_rpc_password', str),
            ('rpcport', 'bitcoind_rpc_port', int),
            ('port', 'bitcoind_p2p_port', int),
        # NOTE(review): the `]:` closing this list appears to have been dropped here
            if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                setattr(args, var_name, var_type(cp.get('x', conf_name)))
        if args.bitcoind_rpc_password is None:
            parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    # worker endpoint is an (interface, port) pair; '' = all interfaces
    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    # NOTE(review): an `else:` appears to have been dropped here
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)
    if args.address is not None:
    # NOTE(review): a `try:` appears to have been dropped here
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
    # NOTE(review): an `except ... e:` appears to have been dropped here
            parser.error('error parsing address: ' + repr(e))
    # NOTE(review): an `else:` appears to have been dropped here
        args.pubkey_hash = None
    def separate_url(url):
        # split 'scheme://user:pass@host/' into ('scheme://host/', 'user:pass')
        s = urlparse.urlsplit(url)
        if '@' not in s.netloc:
            parser.error('merged url netloc must contain an "@"')
        userpass, new_netloc = s.netloc.rsplit('@', 1)
        return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
    merged_urls = map(separate_url, args.merged_urls)
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    # tee all stdout/stderr output into the timestamped logfile
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
    if hasattr(signal, "SIGUSR1"):
        # SIGUSR1 reopens the logfile (supports external log rotation)
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            # NOTE(review): the `logfile.reopen()` call appears to have been dropped here
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
    # twisted log observer that reports errors to the author's server,
    # rate-limited via self.last_sent
    class ErrorReporter(object):
        # NOTE(review): a `def __init__(self):` header appears to have been dropped here
            self.last_sent = None
        def emit(self, eventDict):
            # only error events are reported
            if not eventDict["isError"]:
            # rate limit: at most one report every 5 seconds
            if self.last_sent is not None and time.time() < self.last_sent + 5:
            self.last_sent = time.time()
            if 'failure' in eventDict:
                text = ((eventDict.get('why') or 'Unhandled Error')
                    + '\n' + eventDict['failure'].getTraceback())
                text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
            from twisted.web import client
            # NOTE(review): the `client.getPage(` call line appears to have been dropped here
                url='http://u.forre.st/p2pool_error.cgi',
                postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
            ).addBoth(lambda x: None) # best-effort: ignore success and failure alike
    if not args.no_bugreport:
        log.addObserver(ErrorReporter().emit)
    reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)