1 from __future__ import division
15 if '--iocp' in sys.argv:
16 from twisted.internet import iocpreactor
18 from twisted.internet import defer, reactor, protocol, task
19 from twisted.web import server
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p as bitcoin_p2p, bitcoin.data as bitcoin_data
24 from bitcoin import worker_interface, height_tracker
25 from util import fixargparse, jsonrpc, variable, deferral, math, logging
26 from . import p2p, networks, web, work
27 import p2pool, p2pool.data as p2pool_data
# Fetch a block template from bitcoind over JSON-RPC, normalizing the output of
# either getblocktemplate (mode='template') or the older getmemorypool RPC into
# a single dict consumed by the rest of p2pool.
# NOTE(review): this listing is gap-sampled (some original lines, e.g. the
# try/else scaffolding, are not visible here); comments describe visible code only.
29 @deferral.retry('Error getting work from bitcoind:', 3)
30 @defer.inlineCallbacks
31 def getwork(bitcoind, use_getblocktemplate=False):
# choose the RPC method according to the use_getblocktemplate flag
33 if use_getblocktemplate:
34 return bitcoind.rpc_getblocktemplate(dict(mode='template'))
36 return bitcoind.rpc_getmemorypool()
# -32601 = JSON-RPC "method not found": flip to the other RPC and retry
39 except jsonrpc.Error_for_code(-32601): # Method not found
40 use_getblocktemplate = not use_getblocktemplate
43 except jsonrpc.Error_for_code(-32601): # Method not found
# neither RPC exists -> bitcoind is too old; warn and let the retry decorator loop
44 print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
45 raise deferral.RetrySilentlyException()
# transactions arrive either as hex strings or as dicts carrying a 'data' key;
# decode each into raw bytes
46 packed_transactions = [(x['data'] if isinstance(x, dict) else x).decode('hex') for x in work['transactions']]
# some RPC variants omit 'height'; derive it from the previous block's height
47 if 'height' not in work:
48 work['height'] = (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1
50 assert work['height'] == (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1
# normalize into the field names the rest of p2pool expects
51 defer.returnValue(dict(
52 version=work['version'],
53 previous_block=int(work['previousblockhash'], 16),
54 transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
# merkle link for position 0 (the coinbase slot, hence the leading None)
55 merkle_link=bitcoin_data.calculate_merkle_link([None] + map(bitcoin_data.hash256, packed_transactions), 0),
56 subsidy=work['coinbasevalue'],
57 time=work['time'] if 'time' in work else work['curtime'],
# 'bits' may be a hex string (byte-reversed) or already an int — handle both
58 bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
59 coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
60 height=work['height'],
61 last_update=time.time(),
62 use_getblocktemplate=use_getblocktemplate,
# Entry coroutine: brings up the whole node. This first section connects to
# bitcoind over JSON-RPC, sanity-checks it, and opens the bitcoin P2P link.
# NOTE(review): listing is gap-sampled; some decorated function headers
# (e.g. for poll_height / poll_warnings) are not visible here.
65 @defer.inlineCallbacks
66 def main(args, net, datadir_path, merged_urls, worker_endpoint):
68 print 'p2pool (version %s)' % (p2pool.__version__,)
# event fired on network traffic; passed through to the p2p node and web UI
71 traffic_happened = variable.Event()
73 # connect to bitcoind over JSON-RPC and do initial getmemorypool
74 url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
75 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
# HTTP Basic auth built from the configured RPC credentials
76 bitcoind = jsonrpc.Proxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
77 @deferral.retry('Error while checking Bitcoin connection:', 1)
78 @defer.inlineCallbacks
# network-specific sanity check that we're talking to the right daemon
80 if not (yield net.PARENT.RPC_CHECK(bitcoind)):
81 print >>sys.stderr, " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
82 raise deferral.RetrySilentlyException()
83 if not net.VERSION_CHECK((yield bitcoind.rpc_getinfo())['version']):
84 print >>sys.stderr, ' Bitcoin version too old! Upgrade to 0.6.4 or newer!'
85 raise deferral.RetrySilentlyException()
# first work fetch, used below for the startup status printout
87 temp_work = yield getwork(bitcoind)
# current chain height, refreshed hourly
89 block_height_var = variable.Variable(None)
90 @defer.inlineCallbacks
92 block_height_var.set((yield deferral.retry('Error while calling getblockcount:')(bitcoind.rpc_getblockcount)()))
94 task.LoopingCall(poll_height).start(60*60)
# bitcoind's self-reported warnings ('errors' field), refreshed every 20 min
96 bitcoind_warning_var = variable.Variable(None)
97 @defer.inlineCallbacks
99 errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
100 bitcoind_warning_var.set(errors if errors != '' else None)
101 yield poll_warnings()
102 task.LoopingCall(poll_warnings).start(20*60)
105 print ' Current block hash: %x' % (temp_work['previous_block'],)
106 print ' Current block height: %i' % (block_height_var.value,)
109 # connect to bitcoind over bitcoin-p2p
110 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
111 factory = bitcoin_p2p.ClientFactory(net.PARENT)
112 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
113 yield factory.getProtocol() # waits until handshake is successful
# Determine the payout address: use --address if given, otherwise reuse a
# cached address from disk (validating it against bitcoind) or request a
# fresh one from bitcoind's 'p2pool' account.
117 print 'Determining payout address...'
118 if args.pubkey_hash is None:
119 address_path = os.path.join(datadir_path, 'cached_payout_address')
121 if os.path.exists(address_path):
122 with open(address_path, 'rb') as f:
123 address = f.read().strip('\r\n')
124 print ' Loaded cached address: %s...' % (address,)
# re-validate the cached address: it must be valid AND owned by this bitcoind
128 if address is not None:
129 res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
130 if not res['isvalid'] or not res['ismine']:
131 print ' Cached address is either invalid or not controlled by local bitcoind!'
# no usable cached address -> ask bitcoind for one
135 print ' Getting payout address from bitcoind...'
136 address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
# cache the chosen address for future startups
138 with open(address_path, 'wb') as f:
141 my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
# --address was supplied: args.pubkey_hash was pre-parsed at argument time
143 my_pubkey_hash = args.pubkey_hash
144 print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
# Load previously-seen shares from the on-disk ShareStore into the tracker,
# then wire tracker removal events back to the store so it stays in sync.
147 my_share_hashes = set()
148 my_doa_share_hashes = set()
150 tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
# hashes we have already broadcast to peers (avoid re-sending)
151 shared_share_hashes = set()
152 ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
153 known_verified = set()
154 print "Loading shares..."
# the store yields (mode, contents) records; 'verified_hash' records only
# carry a hash, other visible records carry a full share object
155 for i, (mode, contents) in enumerate(ss.get_shares()):
157 if contents.hash in tracker.items:
# shares loaded from disk count as already shared with the network
159 shared_share_hashes.add(contents.hash)
# time_seen=0 marks disk-loaded shares as not freshly received
160 contents.time_seen = 0
161 tracker.add(contents)
# progress indicator every 1000 shares
162 if len(tracker.items) % 1000 == 0 and tracker.items:
163 print " %i" % (len(tracker.items),)
164 elif mode == 'verified_hash':
165 known_verified.add(contents)
# unknown record mode -> corrupt/unexpected store entry
167 raise AssertionError()
168 print " ...inserting %i verified shares..." % (len(known_verified),)
# promote verified hashes to the verified sub-tracker; drop stale ones
169 for h in known_verified:
170 if h not in tracker.items:
171 ss.forget_verified_share(h)
173 tracker.verified.add(tracker.items[h])
174 print " ...done loading %i shares!" % (len(tracker.items),)
# keep the disk store and shared-hash set consistent with tracker eviction
176 tracker.removed.watch(lambda share: ss.forget_share(share.hash))
177 tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
178 tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
# Work/header tracking: keep bitcoind_work fresh (poll loop keyed on new-block
# notifications), track the best known block header, and recompute the best
# p2pool share whenever work changes.
# NOTE(review): gap-sampled — some function headers and if/elif scaffolding
# around lines 205-213 are not visible here.
180 print 'Initializing work...'
185 bitcoind_work = variable.Variable((yield getwork(bitcoind)))
186 @defer.inlineCallbacks
# arm the deferred BEFORE fetching, so a block arriving mid-fetch isn't missed
189 flag = factory.new_block.get_deferred()
191 bitcoind_work.set((yield getwork(bitcoind, bitcoind_work.value['use_getblocktemplate'])))
# wake on new-block notification or after 15 s, whichever comes first
194 yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
199 best_block_header = variable.Variable(None)
200 def handle_header(new_header):
201 # check that header matches current target
202 if not (net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header)) <= bitcoind_work.value['bits'].target):
204 bitcoind_best_block = bitcoind_work.value['previous_block']
205 if (best_block_header.value is None
207 new_header['previous_block'] == bitcoind_best_block and
208 bitcoin_data.hash256(bitcoin_data.block_header_type.pack(best_block_header.value)) == bitcoind_best_block
209 ) # new is child of current and previous is current
211 bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and
212 best_block_header.value['previous_block'] != bitcoind_best_block
213 )): # new is current and previous is not a child of current
214 best_block_header.set(new_header)
215 @defer.inlineCallbacks
# fetch the header of bitcoind's current tip and feed it through handle_header
217 handle_header((yield factory.conn.value.get_block_header(bitcoind_work.value['previous_block'])))
218 bitcoind_work.changed.watch(lambda _: poll_header())
219 yield deferral.retry('Error while requesting best block header:')(poll_header)()
# helper giving a block's height relative to the highest known block
223 get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, lambda: bitcoind_work.value['previous_block'], net)
225 best_share_var = variable.Variable(None)
226 desired_var = variable.Variable(None)
227 def set_best_share():
# tracker.think picks the best share and a list of desired (missing) shares
228 best, desired = tracker.think(get_height_rel_highest, bitcoind_work.value['previous_block'], bitcoind_work.value['bits'])
230 best_share_var.set(best)
231 desired_var.set(desired)
232 bitcoind_work.changed.watch(lambda _: set_best_share())
238 # setup p2p logic and join p2pool network
# p2pool P2P protocol handlers. Closes over tracker/handle_header from the
# enclosing main(); method bodies are gap-sampled in this listing.
240 class Node(p2p.Node):
# ingest shares received from a peer (or None for locally-loaded shares)
241 def handle_shares(self, shares, peer):
243 print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
# skip shares we already track
247 if share.hash in tracker.items:
248 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
253 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
261 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.items), 2*net.CHAIN_LENGTH)
263 @defer.inlineCallbacks
# peer advertised share hashes: request the ones we don't have yet
264 def handle_share_hashes(self, hashes, peer):
265 new_hashes = [x for x in hashes if x not in tracker.items]
269 shares = yield peer.get_shares(
275 log.err(None, 'in handle_share_hashes:')
277 self.handle_shares(shares, peer)
# serve a peer's share request, walking up each requested chain
279 def handle_get_shares(self, hashes, parents, stops, peer):
# cap total work: at most ~1000 shares across all requested hashes
280 parents = min(parents, 1000//len(hashes))
283 for share_hash in hashes:
284 for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
# stop hashes mark chain points the peer already has
285 if share.hash in stops:
288 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
# peer announced its best block header; verify PoW before accepting
291 def handle_bestblock(self, header, peer):
292 if net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) > header['bits'].target:
293 raise p2p.PeerMisbehavingError('received block header fails PoW test')
294 handle_header(header)
# Block submission: push found blocks to bitcoind over both the P2P link and
# JSON-RPC, and re-broadcast/spread shares that turn out to be blocks.
296 @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
297 def submit_block_p2p(block):
# no live P2P connection -> log loudly (a found block is at stake) and retry
298 if factory.conn.value is None:
299 print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header'])))
300 raise deferral.RetrySilentlyException()
301 factory.conn.value.send_block(block=block)
303 @deferral.retry('Error submitting block: (will retry)', 10, 10)
304 @defer.inlineCallbacks
305 def submit_block_rpc(block, ignore_failure):
# newer daemons: submitblock returns None on success
306 if bitcoind_work.value['use_getblocktemplate']:
307 result = yield bitcoind.rpc_submitblock(bitcoin_data.block_type.pack(block).encode('hex'))
308 success = result is None
# older daemons: getmemorypool with a data argument submits the block
310 result = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex'))
# a block should be accepted iff its header meets its own target
312 success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target
# warn on any mismatch between expectation and the daemon's verdict
313 if (not success and success_expected and not ignore_failure) or (success and not success_expected):
314 print >>sys.stderr, 'Block submittal result: %s (%r) Expected: %s' % (success, result, success_expected)
# submit via both channels for redundancy
316 def submit_block(block, ignore_failure):
317 submit_block_p2p(block)
318 submit_block_rpc(block, ignore_failure)
# on every newly-verified share: if it also satisfies the bitcoin target,
# it IS a block — submit it immediately
320 @tracker.verified.added.watch
322 if share.pow_hash <= share.header['bits'].target:
323 submit_block(share.as_block(tracker), ignore_failure=True)
325 print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
# re-broadcast the share if it's near the current tip (height within 5, or
# directly adjacent to our current previous_block)
328 if (get_height_rel_highest(share.header['previous_block']) > -5 or
329 bitcoind_work.value['previous_block'] in [share.header['previous_block'], share.header_hash]):
330 broadcast_share(share.hash)
332 reactor.callLater(5, spread) # so get_height_rel_highest can update
# Join the p2pool P2P network: resolve bootstrap/explicit peers, persist the
# address book, broadcast best-block headers and new shares, and periodically
# save verified shares to disk.
# NOTE(review): gap-sampled — the parse() def line, the Node(...) constructor
# line and several function headers (save_addrs, save_shares) are not visible.
334 print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
336 @defer.inlineCallbacks
# 'host:port' -> resolve host, use explicit port
339 ip, port = x.split(':')
340 defer.returnValue(((yield reactor.resolve(ip)), int(port)))
# bare host -> resolve and fall back to the network's default P2P port
342 defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
# load the persisted peer address book, tolerating a corrupt file
345 if os.path.exists(os.path.join(datadir_path, 'addrs')):
347 with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
348 addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
350 print >>sys.stderr, 'error parsing addrs'
# seed the address book with the network's bootstrap addresses
351 for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
354 if addr not in addrs:
355 addrs[addr] = (0, time.time(), time.time())
# addresses from -n/--p2pool-node get direct outgoing connections
359 connect_addrs = set()
360 for addr_df in map(parse, args.p2pool_nodes):
362 connect_addrs.add((yield addr_df))
367 best_share_hash_func=lambda: best_share_var.value,
368 port=args.p2pool_port,
371 connect_addrs=connect_addrs,
372 max_incoming_conns=args.p2pool_conns,
373 traffic_happened=traffic_happened,
# persist the address book once a minute
378 with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
379 f.write(json.dumps(p2p_node.addr_store.items()))
380 task.LoopingCall(save_addrs).start(60)
# relay every new best block header to all connected peers
382 @best_block_header.changed.watch
384 for peer in p2p_node.peers.itervalues():
385 peer.send_bestblock(header=header)
387 @defer.inlineCallbacks
# send the last few shares of a chain to peers that haven't seen them
388 def broadcast_share(share_hash):
390 for share in tracker.get_chain(share_hash, min(5, tracker.get_height(share_hash))):
391 if share.hash in shared_share_hashes:
393 shared_share_hashes.add(share.hash)
# snapshot the peer dict — sendShares yields, peers may change meanwhile
396 for peer in list(p2p_node.peers.itervalues()):
397 yield peer.sendShares([share for share in shares if share.peer is not peer])
399 # send share when the chain changes to their chain
400 best_share_var.changed.watch(broadcast_share)
# persist the current best chain (and verified-hash markers) every minute
403 for share in tracker.get_chain(best_share_var.value, min(tracker.get_height(best_share_var.value), 2*net.CHAIN_LENGTH)):
405 if share.hash in tracker.verified.items:
406 ss.add_verified_hash(share.hash)
407 task.LoopingCall(save_shares).start(60)
# Background loop: fetch shares the tracker says it is missing, asking a
# random connected peer. Also (below) a UPnP port-forwarding retry loop.
# NOTE(review): gap-sampled — the surrounding while-loop and try/except
# scaffolding is not visible in this listing.
410 @defer.inlineCallbacks
411 def download_shares():
# block until tracker.think reported at least one desired (missing) share
413 desired = yield desired_var.get_when_satisfies(lambda val: len(val) != 0)
414 peer2, share_hash = random.choice(desired)
# no peers yet: wait and retry
416 if len(p2p_node.peers) == 0:
417 yield deferral.sleep(1)
# ask a random peer, not necessarily the one that advertised the share
419 peer = random.choice(p2p_node.peers.values())
421 print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
423 shares = yield peer.get_shares(
429 log.err(None, 'in download_shares:')
433 yield deferral.sleep(1) # sleep so we don't keep rerequesting the same share nobody has
435 p2p_node.handle_shares(shares, peer)
# UPnP loop: discover the LAN IP and map the p2pool port on the router,
# retrying after a randomized (exponential-variate, mean ~120 s) delay
441 @defer.inlineCallbacks
445 is_lan, lan_ip = yield ipdiscover.get_local_ip()
447 pm = yield portmapper.get_port_mapper()
448 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
# UPnP discovery timing out is expected on networks without UPnP
449 except defer.TimeoutError:
453 log.err(None, 'UPnP error:')
454 yield deferral.sleep(random.expovariate(1/120))
457 # start listening for workers with a JSON-RPC server
# Wire up the miner-facing HTTP/JSON-RPC server and the web UI, write the
# ready flag, print startup/donation messages, and arm a SIGALRM watchdog.
459 print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
# expected payouts for the current best share / current work
461 get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, best_share_var.value, bitcoind_work.value['bits'].target, bitcoind_work.value['subsidy'], net)
463 wb = work.WorkerBridge(my_pubkey_hash, net, args.donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, args.worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var)
464 web_root = web.get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net, wb.get_stale_counts, my_pubkey_hash, wb.local_rate_monitor, args.worker_fee, p2p_node, wb.my_share_hashes, wb.pseudoshare_received, wb.share_received, best_share_var, bitcoind_warning_var, traffic_happened)
# plain GETs get redirected to the static web UI
465 worker_interface.WorkerInterface(wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
467 deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
# ready_flag signals external tooling that startup completed
469 with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
477 print 'Started successfully!'
478 print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
# pick a donation message based on deviation from the 0.5% default
479 if args.donation_percentage > 0.51:
480 print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
481 elif args.donation_percentage < 0.49:
482 print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
484 print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
485 print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
# watchdog: re-armed every second for 30 s; if the reactor stalls that long,
# SIGALRM fires and dumps the current stack to stderr
489 if hasattr(signal, 'SIGALRM'):
490 signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
491 sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
493 signal.siginterrupt(signal.SIGALRM, False)
494 task.LoopingCall(signal.alarm, 30).start(1)
# Optional IRC announcer: connects to freenode and announces found blocks on
# the network's announce channel, de-duplicating against recently seen lines.
# NOTE(review): gap-sampled — signedOn()'s def line and the IRCClientFactory
# body are not visible in this listing.
496 if args.irc_announce:
497 from twisted.words.protocols import irc
498 class IRCClient(irc.IRCClient):
# random 2-digit suffix avoids nickname collisions between nodes
499 nickname = 'p2pool%02i' % (random.randrange(100),)
500 channel = net.ANNOUNCE_CHANNEL
501 def lineReceived(self, line):
504 irc.IRCClient.lineReceived(self, line)
506 irc.IRCClient.signedOn(self)
# connection succeeded: reset the reconnect backoff, join the channel
507 self.factory.resetDelay()
508 self.join(self.channel)
509 @defer.inlineCallbacks
# called for every newly-verified share; announces it if it's a block
510 def new_share(share):
# only genuine, recent blocks (timestamp within 10 min of now)
511 if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
# random delay (mean ~60 s) so multiple nodes don't announce at once
512 yield deferral.sleep(random.expovariate(1/60))
513 message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
# skip if any recent channel message already mentions this block hash
514 if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
515 self.say(self.channel, message)
516 self._remember_message(message)
517 self.watch_id = tracker.verified.added.watch(new_share)
518 self.recent_messages = []
# keep a bounded (100-entry) window of recent messages for de-duplication
519 def _remember_message(self, message):
520 self.recent_messages.append(message)
521 while len(self.recent_messages) > 100:
522 self.recent_messages.pop(0)
523 def privmsg(self, user, channel, message):
524 if channel == self.channel:
525 self._remember_message(message)
# unhook the tracker watch so a dead client isn't called again
526 def connectionLost(self, reason):
527 tracker.verified.added.unwatch(self.watch_id)
528 print 'IRC connection lost:', reason.getErrorMessage()
529 class IRCClientFactory(protocol.ReconnectingClientFactory):
531 reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
# Status loop: every ~3 s assemble a multi-line status string (chain size,
# peers, local/pool hash rates, stale rates, expected times) and print it when
# it changed or 15 s elapsed; also surface p2pool warnings on stderr.
# NOTE(review): gap-sampled — the function header, while-loop and several
# interleaved lines are not visible in this listing.
533 @defer.inlineCallbacks
538 yield deferral.sleep(3)
540 height = tracker.get_height(best_share_var.value)
541 this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
543 len(tracker.verified.items),
546 sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
# in DEBUG mode also show reactor file-descriptor counts
547 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
# local hash rate from recent work datums: sum of work/elapsed-time
549 datums, dt = wb.local_rate_monitor.get_datums_in_last()
550 my_att_s = sum(datum['work']/dt for datum in datums)
551 this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
552 math.format(int(my_att_s)),
554 math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
# expected time = share target space / local attempts per second
555 math.format_dt(2**256 / tracker.items[best_share_var.value].max_target / my_att_s) if my_att_s and best_share_var.value else '???',
559 (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
# stale proportion over roughly the last hour of shares
560 stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
# pool rate corrected upward for the stale fraction
561 real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
563 this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
564 shares, stale_orphan_shares, stale_doa_shares,
565 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
# efficiency = our non-stale fraction relative to the pool-wide stale rate
566 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
# payout in whole coins (1e-8 converts from base units)
567 get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
569 this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
570 math.format(int(real_att_s)),
572 math.format_dt(2**256 / bitcoind_work.value['bits'].target / real_att_s),
# print any active p2pool warnings in a highly visible banner
575 for warning in p2pool_data.get_warnings(tracker, best_share_var.value, net, bitcoind_warning_var.value, bitcoind_work.value):
576 print >>sys.stderr, '#'*40
577 print >>sys.stderr, '>>> Warning: ' + warning
578 print >>sys.stderr, '#'*40
# only print the status when it changed or at least every 15 s
580 if this_str != last_str or time.time() > last_time + 15:
583 last_time = time.time()
589 log.err(None, 'Fatal error:')
# Command-line interface: build the argument parser with general, p2pool-,
# worker- and bitcoind-interface option groups.
# realnets: every configured network except testnets, used for defaults/help
592 realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
# @-prefixed arguments are read from files (fromfile_prefix_chars)
594 parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
595 parser.add_argument('--version', action='version', version=p2pool.__version__)
596 parser.add_argument('--net',
597 help='use specified network (default: bitcoin)',
598 action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
599 parser.add_argument('--testnet',
600 help='''use the network's testnet''',
601 action='store_const', const=True, default=False, dest='testnet')
602 parser.add_argument('--debug',
603 help='enable debugging mode',
604 action='store_const', const=True, default=False, dest='debug')
605 parser.add_argument('-a', '--address',
606 help='generate payouts to this address (default: <address requested from bitcoind>)',
607 type=str, action='store', default=None, dest='address')
608 parser.add_argument('--datadir',
609 help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
610 type=str, action='store', default=None, dest='datadir')
611 parser.add_argument('--logfile',
612 help='''log to this file (default: data/<NET>/log)''',
613 type=str, action='store', default=None, dest='logfile')
# --merged may be given multiple times (action='append')
614 parser.add_argument('--merged',
615 help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
616 type=str, action='append', default=[], dest='merged_urls')
617 parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
618 help='donate this percentage of work towards the development of p2pool (default: 0.5)',
619 type=float, action='store', default=0.5, dest='donation_percentage')
620 parser.add_argument('--iocp',
621 help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
622 action='store_true', default=False, dest='iocp')
623 parser.add_argument('--irc-announce',
624 help='announce any blocks found on irc://irc.freenode.net/#p2pool',
625 action='store_true', default=False, dest='irc_announce')
626 parser.add_argument('--no-bugreport',
627 help='disable submitting caught exceptions to the author',
628 action='store_true', default=False, dest='no_bugreport')
# --- p2pool P2P interface options ---
630 p2pool_group = parser.add_argument_group('p2pool interface')
631 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
632 help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
633 type=int, action='store', default=None, dest='p2pool_port')
634 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
635 help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
636 type=str, action='append', default=[], dest='p2pool_nodes')
# note: dest='upnp' defaults True; the flag turns UPnP OFF (store_false)
637 parser.add_argument('--disable-upnp',
638 help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
639 action='store_false', default=True, dest='upnp')
640 p2pool_group.add_argument('--max-conns', metavar='CONNS',
641 help='maximum incoming connections (default: 40)',
642 type=int, action='store', default=40, dest='p2pool_conns')
# --- miner-facing worker interface options ---
644 worker_group = parser.add_argument_group('worker interface')
645 worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
646 help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
647 type=str, action='store', default=None, dest='worker_endpoint')
648 worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
649 help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
650 type=float, action='store', default=0, dest='worker_fee')
# --- bitcoind connection options ---
652 bitcoind_group = parser.add_argument_group('bitcoind interface')
653 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
654 help='connect to this address (default: 127.0.0.1)',
655 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
656 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
657 help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
658 type=int, action='store', default=None, dest='bitcoind_rpc_port')
659 bitcoind_group.add_argument('--bitcoind-rpc-ssl',
660 help='connect to JSON-RPC interface using SSL',
661 action='store_true', default=False, dest='bitcoind_rpc_ssl')
662 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
663 help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
664 type=int, action='store', default=None, dest='bitcoind_p2p_port')
# positional: zero, one or two tokens (username [password]); resolved below
666 bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
667 help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
668 type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
# Parse arguments, resolve defaults (including reading bitcoin.conf for
# missing RPC credentials/ports), and set up log-file redirection of
# stdout/stderr with SIGUSR1-triggered log rotation.
# NOTE(review): gap-sampled — some try/except and else lines are missing.
670 args = parser.parse_args()
# Twisted deferred debugging (more informative tracebacks)
674 defer.setDebugging(True)
# select the network object, e.g. 'bitcoin' or 'bitcoin_testnet'
676 net_name = args.net_name + ('_testnet' if args.testnet else '')
677 net = networks.nets[net_name]
# per-network data directory, created on first run
679 datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
680 if not os.path.exists(datadir_path):
681 os.makedirs(datadir_path)
683 if len(args.bitcoind_rpc_userpass) > 2:
684 parser.error('a maximum of two arguments are allowed')
# right-align the 0-2 positional tokens into (username, password):
# none -> (None, None); one -> (None, token); two -> (user, pass)
685 args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
# no password given on the command line -> read bitcoin.conf
687 if args.bitcoind_rpc_password is None:
688 conf_path = net.PARENT.CONF_FILE_FUNC()
689 if not os.path.exists(conf_path):
690 parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
691 '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
694 '''rpcpassword=%x\r\n'''
696 '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
# bitcoin.conf has no section headers; prepend a dummy '[x]' so
# RawConfigParser accepts it
697 with open(conf_path, 'rb') as f:
698 cp = ConfigParser.RawConfigParser()
699 cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
# fill in any still-unset args from the matching conf entries
700 for conf_name, var_name, var_type in [
701 ('rpcuser', 'bitcoind_rpc_username', str),
702 ('rpcpassword', 'bitcoind_rpc_password', str),
703 ('rpcport', 'bitcoind_rpc_port', int),
704 ('port', 'bitcoind_p2p_port', int),
706 if getattr(args, var_name) is None and cp.has_option('x', conf_name):
707 setattr(args, var_name, var_type(cp.get('x', conf_name)))
708 if args.bitcoind_rpc_password is None:
709 parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
# remaining defaults come from the network definition
711 if args.bitcoind_rpc_username is None:
712 args.bitcoind_rpc_username = ''
714 if args.bitcoind_rpc_port is None:
715 args.bitcoind_rpc_port = net.PARENT.RPC_PORT
717 if args.bitcoind_p2p_port is None:
718 args.bitcoind_p2p_port = net.PARENT.P2P_PORT
720 if args.p2pool_port is None:
721 args.p2pool_port = net.P2P_PORT
# worker endpoint: default port on all interfaces, bare port, or ADDR:PORT
723 if args.worker_endpoint is None:
724 worker_endpoint = '', net.WORKER_PORT
725 elif ':' not in args.worker_endpoint:
726 worker_endpoint = '', int(args.worker_endpoint)
728 addr, port = args.worker_endpoint.rsplit(':', 1)
729 worker_endpoint = addr, int(port)
# pre-parse --address into a pubkey hash so errors surface at startup
731 if args.address is not None:
733 args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
735 parser.error('error parsing address: ' + repr(e))
737 args.pubkey_hash = None
# split a merged-mining URL into (credential-free URL, 'user:pass')
739 def separate_url(url):
740 s = urlparse.urlsplit(url)
741 if '@' not in s.netloc:
742 parser.error('merged url netloc must contain an "@"')
743 userpass, new_netloc = s.netloc.rsplit('@', 1)
744 return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
745 merged_urls = map(separate_url, args.merged_urls)
747 if args.logfile is None:
748 args.logfile = os.path.join(datadir_path, 'log')
# tee timestamped output to both the console (stderr) and the log file,
# then replace stdout/stderr so all prints go through the pipe
750 logfile = logging.LogFile(args.logfile)
751 pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
752 sys.stdout = logging.AbortPipe(pipe)
753 sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
# SIGUSR1 reopens the log file (supports external log rotation)
754 if hasattr(signal, "SIGUSR1"):
755 def sigusr1(signum, frame):
756 print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
758 print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
759 signal.signal(signal.SIGUSR1, sigusr1)
# also reopen periodically so a rotated-away file is picked up regardless
760 task.LoopingCall(logfile.reopen).start(5)
# Twisted log observer that reports errors to the author's collection
# endpoint (rate-limited to one report per 5 s), then start the reactor.
# NOTE(review): gap-sampled — the __init__ def line and the client call
# around line 780-786 are partially missing in this listing.
762 class ErrorReporter(object):
# timestamp of the last report sent, for rate limiting
764 self.last_sent = None
766 def emit(self, eventDict):
# only error events are reported
767 if not eventDict["isError"]:
# drop reports arriving within 5 s of the previous one
770 if self.last_sent is not None and time.time() < self.last_sent + 5:
772 self.last_sent = time.time()
# prefer the failure's traceback; otherwise join the message parts
774 if 'failure' in eventDict:
775 text = ((eventDict.get('why') or 'Unhandled Error')
776 + '\n' + eventDict['failure'].getTraceback())
778 text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
780 from twisted.web import client
782 url='http://u.forre.st/p2pool_error.cgi',
# report includes p2pool version and network name for triage
784 postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
# swallow the POST's own outcome so reporting can never cascade
786 ).addBoth(lambda x: None)
# bug reporting is opt-out via --no-bugreport
787 if not args.no_bugreport:
788 log.addObserver(ErrorReporter().emit)
# hand control to Twisted: main() runs once the reactor is up
790 reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)