1 from __future__ import division
15 if '--iocp' in sys.argv:
16 from twisted.internet import iocpreactor
18 from twisted.internet import defer, reactor, protocol, task
19 from twisted.web import server
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p as bitcoin_p2p, bitcoin.data as bitcoin_data
24 from bitcoin import worker_interface, height_tracker
25 from util import expiring_dict, fixargparse, jsonrpc, variable, deferral, math, logging
26 from . import p2p, networks, web, work
27 import p2pool, p2pool.data as p2pool_data
# getwork(bitcoind): fetch a block template from bitcoind via the (pre-BIP 22)
# getmemorypool JSON-RPC call and repackage it as p2pool's internal work dict.
# Wrapped in deferral.retry so transient RPC failures are retried; runs as a
# Twisted inlineCallbacks coroutine, so the "return" is defer.returnValue().
# NOTE(review): this is an elided numbered listing -- e.g. the `try:` that
# opens the `except` below (orig. line 32) and the re-raise after the version
# check (orig. line 38) are not visible here; control flow shown is incomplete.
29 @deferral.retry('Error getting work from bitcoind:', 3)
30 @defer.inlineCallbacks
31 def getwork(bitcoind):
33 work = yield bitcoind.rpc_getmemorypool()
# JSON-RPC error code -32601 means "Method not found": this bitcoind is too
# old to support getmemorypool, so print a hint and retry silently.
34 except jsonrpc.Error, e:
35 if e.code == -32601: # Method not found
36 print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
37 raise deferral.RetrySilentlyException()
# Transactions arrive hex-encoded; decode once and reuse the raw bytes for
# both the parsed tx list and the merkle link over their hashes. The [None]
# placeholder at index 0 reserves the coinbase slot in the merkle tree.
39 packed_transactions = [x.decode('hex') for x in work['transactions']]
40 defer.returnValue(dict(
41 previous_block=int(work['previousblockhash'], 16),
42 transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
43 merkle_link=bitcoin_data.calculate_merkle_link([None] + map(bitcoin_data.hash256, packed_transactions), 0),
44 subsidy=work['coinbasevalue'],
# 'bits' may be delivered as a hex string (byte-reversed on the wire, hence
# the [::-1]) or already as an integer -- both forms are handled here.
46 bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
# coinbaseflags: prefer the explicit field; else concatenate all decoded
# 'coinbaseaux' values; else empty string.
47 coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
# clock_offset: local wall clock minus bitcoind's reported template time.
48 clock_offset=time.time() - work['time'],
49 last_update=time.time(),
# main(): node bootstrap coroutine. Verifies the bitcoind RPC and P2P
# connections, resolves the miner payout address, loads the on-disk share
# store, joins the p2pool P2P network, optionally maps the port via UPnP and
# announces blocks on IRC, starts the worker web/JSON-RPC interface, and then
# loops printing a status line.
# NOTE(review): elided numbered listing -- original line numbers are embedded
# in each line and many lines (try/except/else scaffolding, several nested
# `def` headers, some bodies) are missing, so nested control flow below is
# incomplete as shown.
52 @defer.inlineCallbacks
53 def main(args, net, datadir_path, merged_urls, worker_endpoint):
55 print 'p2pool (version %s)' % (p2pool.__version__,)
58 # connect to bitcoind over JSON-RPC and do initial getmemorypool
59 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
60 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
61 bitcoind = jsonrpc.Proxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
# check(): retried until bitcoind answers the network-specific RPC_CHECK and
# its reported version passes net.VERSION_CHECK against the fetched work.
# (The `def check():` header itself is elided from this listing.)
62 @deferral.retry('Error while checking Bitcoin connection:', 1)
63 @defer.inlineCallbacks
65 if not (yield net.PARENT.RPC_CHECK(bitcoind)):
66 print >>sys.stderr, " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
67 raise deferral.RetrySilentlyException()
68 temp_work = yield getwork(bitcoind)
69 if not net.VERSION_CHECK((yield bitcoind.rpc_getinfo())['version'], temp_work):
70 print >>sys.stderr, ' Bitcoin version too old! BIP16 support required! Upgrade to 0.6.0rc4 or greater!'
71 raise deferral.RetrySilentlyException()
72 defer.returnValue(temp_work)
73 temp_work = yield check()
# Poll the chain height into block_height_var hourly, and getmininginfo
# 'errors' into bitcoind_warning_var every 20 minutes (empty string -> None).
75 block_height_var = variable.Variable(None)
76 @defer.inlineCallbacks
78 block_height_var.set((yield deferral.retry('Error while calling getblockcount:')(bitcoind.rpc_getblockcount)()))
80 task.LoopingCall(poll_height).start(60*60)
82 bitcoind_warning_var = variable.Variable(None)
83 @defer.inlineCallbacks
85 errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
86 bitcoind_warning_var.set(errors if errors != '' else None)
88 task.LoopingCall(poll_warnings).start(20*60)
91 print ' Current block hash: %x' % (temp_work['previous_block'],)
92 print ' Current block height: %i' % (block_height_var.value,)
95 # connect to bitcoind over bitcoin-p2p
96 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
97 factory = bitcoin_p2p.ClientFactory(net.PARENT)
98 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
99 yield factory.getProtocol() # waits until handshake is successful
# Payout address: use --address if given (args.pubkey_hash already set by the
# caller); otherwise load a cached address from datadir, validate it against
# bitcoind (must be valid AND controlled by this wallet), or fall back to
# asking bitcoind for the 'p2pool' account address and caching it.
103 print 'Determining payout address...'
104 if args.pubkey_hash is None:
105 address_path = os.path.join(datadir_path, 'cached_payout_address')
107 if os.path.exists(address_path):
108 with open(address_path, 'rb') as f:
109 address = f.read().strip('\r\n')
110 print ' Loaded cached address: %s...' % (address,)
114 if address is not None:
115 res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
116 if not res['isvalid'] or not res['ismine']:
117 print ' Cached address is either invalid or not controlled by local bitcoind!'
121 print ' Getting payout address from bitcoind...'
122 address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
124 with open(address_path, 'wb') as f:
127 my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
129 my_pubkey_hash = args.pubkey_hash
130 print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
# Load the persisted share chain from the ShareStore into the tracker.
# 'share' records are inserted directly; 'verified_hash' records are applied
# afterwards (only for hashes actually present in the tracker).
133 my_share_hashes = set()
134 my_doa_share_hashes = set()
136 tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
137 shared_share_hashes = set()
138 ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
139 known_verified = set()
140 print "Loading shares..."
141 for i, (mode, contents) in enumerate(ss.get_shares()):
143 if contents.hash in tracker.items:
145 shared_share_hashes.add(contents.hash)
146 contents.time_seen = 0
147 tracker.add(contents)
148 if len(tracker.items) % 1000 == 0 and tracker.items:
149 print " %i" % (len(tracker.items),)
150 elif mode == 'verified_hash':
151 known_verified.add(contents)
153 raise AssertionError()
154 print " ...inserting %i verified shares..." % (len(known_verified),)
155 for h in known_verified:
156 if h not in tracker.items:
157 ss.forget_verified_share(h)
159 tracker.verified.add(tracker.items[h])
160 print " ...done loading %i shares!" % (len(tracker.items),)
# Keep the on-disk store and the shared-hash set in sync with tracker removals.
162 tracker.removed.watch(lambda share: ss.forget_share(share.hash))
163 tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
164 tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
166 print 'Initializing work...'
# Work poller: refreshes bitcoind_work whenever a new block is announced over
# bitcoin-p2p (factory.new_block) or at latest every 15 seconds, whichever
# fires first. (The poller's `def` header is elided from this listing.)
171 bitcoind_work = variable.Variable((yield getwork(bitcoind)))
172 @defer.inlineCallbacks
175 flag = factory.new_block.get_deferred()
177 bitcoind_work.set((yield getwork(bitcoind)))
180 yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
# Best-known block header tracking: handle_header() accepts a header only if
# it passes PoW against the current target, then keeps it when it either
# extends the current best block or IS the current best block (conditions
# documented in the inline comments below).
185 best_block_header = variable.Variable(None)
186 def handle_header(new_header):
187 # check that header matches current target
188 if not (net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header)) <= bitcoind_work.value['bits'].target):
190 bitcoind_best_block = bitcoind_work.value['previous_block']
191 if (best_block_header.value is None
193 new_header['previous_block'] == bitcoind_best_block and
194 bitcoin_data.hash256(bitcoin_data.block_header_type.pack(best_block_header.value)) == bitcoind_best_block
195 ) # new is child of current and previous is current
197 bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and
198 best_block_header.value['previous_block'] != bitcoind_best_block
199 )): # new is current and previous is not a child of current
200 best_block_header.set(new_header)
201 @defer.inlineCallbacks
203 handle_header((yield factory.conn.value.get_block_header(bitcoind_work.value['previous_block'])))
204 bitcoind_work.changed.watch(lambda _: poll_header())
205 yield deferral.retry('Error while requesting best block header:')(poll_header)()
# Share-chain logic: pick the best share and request missing parents from
# peers. 'requested' maps share_hash -> (last_request_time, count) and the
# 10 * 1.5**count window implements exponential re-request backoff.
209 get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, lambda: bitcoind_work.value['previous_block'], net)
210 requested = expiring_dict.ExpiringDict(300)
211 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
213 best_share_var = variable.Variable(None)
214 def set_best_share():
215 best, desired = tracker.think(get_height_rel_highest, bitcoind_work.value['previous_block'], bitcoind_work.value['bits'])
217 best_share_var.set(best)
220 for peer2, share_hash in desired:
221 if share_hash not in tracker.tails: # was received in the time tracker.think was running
223 last_request_time, count = requested.get(share_hash, (None, 0))
224 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
226 potential_peers = set()
227 for head in tracker.tails[share_hash]:
228 potential_peers.update(peer_heads.get(head, set()))
229 potential_peers = [peer for peer in potential_peers if peer.connected2]
230 if count == 0 and peer2 is not None and peer2.connected2:
# Mostly ask a random peer that advertised this chain, but 20% of the time
# (or when none qualify) fall back to the originally-suggested peer.
233 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
237 print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
241 stops=list(set(tracker.heads) | set(
242 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
245 requested[share_hash] = t, count + 1
246 bitcoind_work.changed.watch(lambda _: set_best_share())
253 # setup p2p logic and join p2pool network
# Node: p2pool's P2P protocol handlers. Incoming shares/hashes feed the
# tracker and the 'requested' backoff table; handle_get_shares serves chain
# segments back to peers; handle_bestblock forwards PoW-checked headers.
255 class Node(p2p.Node):
256 def handle_shares(self, shares, peer):
258 print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
262 if share.hash in tracker.items:
263 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
268 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
272 if shares and peer is not None:
273 peer_heads.setdefault(shares[0].hash, set()).add(peer)
279 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.items), 2*net.CHAIN_LENGTH)
281 def handle_share_hashes(self, hashes, peer):
284 for share_hash in hashes:
285 if share_hash in tracker.items:
287 last_request_time, count = requested.get(share_hash, (None, 0))
288 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
290 print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
291 get_hashes.append(share_hash)
292 requested[share_hash] = t, count + 1
294 if hashes and peer is not None:
295 peer_heads.setdefault(hashes[0], set()).add(peer)
297 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
299 def handle_get_shares(self, hashes, parents, stops, peer):
# Cap total shares served per request at ~1000 regardless of how many
# starting hashes the peer asked for.
300 parents = min(parents, 1000//len(hashes))
303 for share_hash in hashes:
304 for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
305 if share.hash in stops:
308 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
311 def handle_bestblock(self, header, peer):
312 if net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) > header['bits'].target:
313 raise p2p.PeerMisbehavingError('received block header fails PoW test')
314 handle_header(header)
# Block submission goes out both ways: over bitcoin-p2p (send_block) and via
# the getmemorypool RPC with the hex-encoded block; submit_block() fires both.
316 @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
317 def submit_block_p2p(block):
318 if factory.conn.value is None:
319 print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%32x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header'])))
320 raise deferral.RetrySilentlyException()
321 factory.conn.value.send_block(block=block)
323 @deferral.retry('Error submitting block: (will retry)', 10, 10)
324 @defer.inlineCallbacks
325 def submit_block_rpc(block, ignore_failure):
326 success = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex'))
# Sanity check: warn when the RPC result disagrees with what the header's
# own PoW says should have happened.
327 success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target
328 if (not success and success_expected and not ignore_failure) or (success and not success_expected):
329 print >>sys.stderr, 'Block submittal result: %s Expected: %s' % (success, success_expected)
331 def submit_block(block, ignore_failure):
332 submit_block_p2p(block)
333 submit_block_rpc(block, ignore_failure)
# Watcher on newly-verified shares: any share whose PoW meets the full block
# target IS a block -- submit it to bitcoind and spread it to peers once
# get_height_rel_highest has had a chance to update. (The watcher function's
# `def` header is elided from this listing.)
335 @tracker.verified.added.watch
337 if share.pow_hash <= share.header['bits'].target:
338 submit_block(share.as_block(tracker), ignore_failure=True)
340 print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
343 if (get_height_rel_highest(share.header['previous_block']) > -5 or
344 bitcoind_work.value['previous_block'] in [share.header['previous_block'], share.header_hash]):
345 broadcast_share(share.hash)
347 reactor.callLater(5, spread) # so get_height_rel_highest can update
349 print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
# parse(x): resolve 'host[:port]' strings to (ip, port) tuples, defaulting to
# net.P2P_PORT when no port is given. (The `def parse(x):` header is elided.)
351 @defer.inlineCallbacks
354 ip, port = x.split(':')
355 defer.returnValue(((yield reactor.resolve(ip)), int(port)))
357 defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
# Seed the address store from the persisted 'addrs' file (best-effort) plus
# the network's bootstrap addresses, then bring up the p2p Node and persist
# the address store back to disk once a minute.
360 if os.path.exists(os.path.join(datadir_path, 'addrs')):
362 with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
363 addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
365 print >>sys.stderr, 'error parsing addrs'
366 for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
369 if addr not in addrs:
370 addrs[addr] = (0, time.time(), time.time())
374 connect_addrs = set()
375 for addr_df in map(parse, args.p2pool_nodes):
377 connect_addrs.add((yield addr_df))
382 best_share_hash_func=lambda: best_share_var.value,
383 port=args.p2pool_port,
386 connect_addrs=connect_addrs,
387 max_incoming_conns=args.p2pool_conns,
392 with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
393 f.write(json.dumps(p2p_node.addr_store.items()))
394 task.LoopingCall(save_addrs).start(60)
# Relay a changed best block header to every connected peer.
396 @best_block_header.changed.watch
398 for peer in p2p_node.peers.itervalues():
399 peer.send_bestblock(header=header)
401 @defer.inlineCallbacks
402 def broadcast_share(share_hash):
404 for share in tracker.get_chain(share_hash, min(5, tracker.get_height(share_hash))):
405 if share.hash in shared_share_hashes:
407 shared_share_hashes.add(share.hash)
410 for peer in list(p2p_node.peers.itervalues()):
411 yield peer.sendShares([share for share in shares if share.peer is not peer])
413 # send share when the chain changes to their chain
414 best_share_var.changed.watch(broadcast_share)
# Persist verified shares along the best chain to the ShareStore every minute.
417 for share in tracker.get_chain(best_share_var.value, min(tracker.get_height(best_share_var.value), 2*net.CHAIN_LENGTH)):
419 if share.hash in tracker.verified.items:
420 ss.add_verified_hash(share.hash)
421 task.LoopingCall(save_shares).start(60)
# UPnP loop (presumably gated on args.upnp -- the guard is elided): discover
# the LAN IP, map the p2pool port via the router, and repeat after a randomized
# ~2-minute exponential delay; timeouts and errors are logged, not fatal.
427 @defer.inlineCallbacks
431 is_lan, lan_ip = yield ipdiscover.get_local_ip()
433 pm = yield portmapper.get_port_mapper()
434 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
435 except defer.TimeoutError:
439 log.err(None, 'UPnP error:')
440 yield deferral.sleep(random.expovariate(1/120))
443 # start listening for workers with a JSON-RPC server
445 print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
447 get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, best_share_var.value, bitcoind_work.value['bits'].target, bitcoind_work.value['subsidy'], net)
449 wb = work.WorkerBridge(my_pubkey_hash, net, args.donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, args.worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var)
450 web_root = web.get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net, wb.get_stale_counts, my_pubkey_hash, wb.local_rate_monitor, args.worker_fee, p2p_node, wb.my_share_hashes, wb.pseudoshare_received, wb.share_received, best_share_var, bitcoind_warning_var)
451 worker_interface.WorkerInterface(wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
453 deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
455 with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
463 print 'Started successfully!'
464 print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
465 if args.donation_percentage > 0.51:
466 print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
467 elif args.donation_percentage < 0.49:
468 print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
470 print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
471 print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
# Watchdog: SIGALRM every 30s (re-armed each second) dumps a stack trace to
# stderr via the reactor thread if the process is wedged; siginterrupt(False)
# keeps the alarm from aborting in-progress syscalls.
475 if hasattr(signal, 'SIGALRM'):
476 signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
477 sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
479 signal.siginterrupt(signal.SIGALRM, False)
480 task.LoopingCall(signal.alarm, 30).start(1)
# Optional IRC announcer: a reconnecting client on irc.freenode.net that
# announces found blocks on the network's channel, with a 100-message
# de-duplication window (also fed by channel traffic via privmsg).
482 if args.irc_announce:
483 from twisted.words.protocols import irc
484 class IRCClient(irc.IRCClient):
485 nickname = 'p2pool%02i' % (random.randrange(100),)
486 channel = net.ANNOUNCE_CHANNEL
487 def lineReceived(self, line):
490 irc.IRCClient.lineReceived(self, line)
492 irc.IRCClient.signedOn(self)
493 self.factory.resetDelay()
494 self.join(self.channel)
495 @defer.inlineCallbacks
496 def new_share(share):
# Only announce fresh, real blocks; the randomized ~1 min delay staggers
# announcements from multiple nodes seeing the same block.
497 if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
498 yield deferral.sleep(random.expovariate(1/60))
499 message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
500 if message not in self.recent_messages:
501 self.say(self.channel, message)
502 self._remember_message(message)
503 self.watch_id = tracker.verified.added.watch(new_share)
504 self.recent_messages = []
505 def _remember_message(self, message):
506 self.recent_messages.append(message)
507 while len(self.recent_messages) > 100:
508 self.recent_messages.pop(0)
509 def privmsg(self, user, channel, message):
510 if channel == self.channel:
511 self._remember_message(message)
512 def connectionLost(self, reason):
513 tracker.verified.added.unwatch(self.watch_id)
514 print 'IRC connection lost:', reason.getErrorMessage()
515 class IRCClientFactory(protocol.ReconnectingClientFactory):
517 reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
# Status loop: every 3 seconds build a multi-line status string (share chain
# size, peers, local/pool hash rates, stale rates, expected payout/time to
# block) and print it when it changed or 15s elapsed; also surfaces warnings
# and complains if bitcoind work is >60s stale.
519 @defer.inlineCallbacks
524 yield deferral.sleep(3)
526 if time.time() > bitcoind_work.value['last_update'] + 60:
527 print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - bitcoind_work.value['last_update']),)
529 height = tracker.get_height(best_share_var.value)
530 this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
532 len(tracker.verified.items),
535 sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
536 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
538 datums, dt = wb.local_rate_monitor.get_datums_in_last()
539 my_att_s = sum(datum['work']/dt for datum in datums)
540 this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
541 math.format(int(my_att_s)),
543 math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
544 math.format_dt(2**256 / tracker.items[best_share_var.value].max_target / my_att_s) if my_att_s and best_share_var.value else '???',
548 (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
549 stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
# Scale observed pool rate up by the stale proportion to estimate the true
# attempt rate.
550 real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
552 this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
553 shares, stale_orphan_shares, stale_doa_shares,
554 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
555 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
556 get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
558 this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
559 math.format(int(real_att_s)),
561 math.format_dt(2**256 / bitcoind_work.value['bits'].target / real_att_s),
564 for warning in p2pool_data.get_warnings(tracker, best_share_var.value, net, bitcoind_warning_var.value):
565 print >>sys.stderr, '#'*40
566 print >>sys.stderr, '>>> Warning: ' + warning
567 print >>sys.stderr, '#'*40
569 if this_str != last_str or time.time() > last_time + 15:
572 last_time = time.time()
578 log.err(None, 'Fatal error:')
# Startup / command-line section. NOTE(review): the header of the enclosing
# function (orig. line ~580, presumably `def run():`) is elided from this
# listing -- these lines appear to be its interior: argument parsing, filling
# missing options from bitcoin.conf, logging setup, crash reporting, and
# scheduling main() on the Twisted reactor. TODO confirm against full source.
581 realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
583 parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
584 parser.add_argument('--version', action='version', version=p2pool.__version__)
585 parser.add_argument('--net',
586 help='use specified network (default: bitcoin)',
587 action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
588 parser.add_argument('--testnet',
589 help='''use the network's testnet''',
590 action='store_const', const=True, default=False, dest='testnet')
591 parser.add_argument('--debug',
592 help='enable debugging mode',
593 action='store_const', const=True, default=False, dest='debug')
594 parser.add_argument('-a', '--address',
595 help='generate payouts to this address (default: <address requested from bitcoind>)',
596 type=str, action='store', default=None, dest='address')
597 parser.add_argument('--datadir',
598 help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
599 type=str, action='store', default=None, dest='datadir')
600 parser.add_argument('--logfile',
601 help='''log to this file (default: data/<NET>/log)''',
602 type=str, action='store', default=None, dest='logfile')
603 parser.add_argument('--merged',
604 help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
605 type=str, action='append', default=[], dest='merged_urls')
606 parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
607 help='donate this percentage of work towards the development of p2pool (default: 0.5)',
608 type=float, action='store', default=0.5, dest='donation_percentage')
609 parser.add_argument('--iocp',
610 help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
611 action='store_true', default=False, dest='iocp')
612 parser.add_argument('--irc-announce',
613 help='announce any blocks found on irc://irc.freenode.net/#p2pool',
614 action='store_true', default=False, dest='irc_announce')
615 parser.add_argument('--no-bugreport',
616 help='disable submitting caught exceptions to the author',
617 action='store_true', default=False, dest='no_bugreport')
619 p2pool_group = parser.add_argument_group('p2pool interface')
620 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
621 help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
622 type=int, action='store', default=None, dest='p2pool_port')
623 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
624 help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
625 type=str, action='append', default=[], dest='p2pool_nodes')
626 parser.add_argument('--disable-upnp',
627 help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
628 action='store_false', default=True, dest='upnp')
629 p2pool_group.add_argument('--max-conns', metavar='CONNS',
630 help='maximum incoming connections (default: 40)',
631 type=int, action='store', default=40, dest='p2pool_conns')
633 worker_group = parser.add_argument_group('worker interface')
634 worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
635 help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
636 type=str, action='store', default=None, dest='worker_endpoint')
637 worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
638 help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
639 type=float, action='store', default=0, dest='worker_fee')
641 bitcoind_group = parser.add_argument_group('bitcoind interface')
642 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
643 help='connect to this address (default: 127.0.0.1)',
644 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
645 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
646 help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
647 type=int, action='store', default=None, dest='bitcoind_rpc_port')
648 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
649 help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
650 type=int, action='store', default=None, dest='bitcoind_p2p_port')
652 bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
653 help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
654 type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
656 args = parser.parse_args()
660 defer.setDebugging(True)
662 net_name = args.net_name + ('_testnet' if args.testnet else '')
663 net = networks.nets[net_name]
665 datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
666 if not os.path.exists(datadir_path):
667 os.makedirs(datadir_path)
669 if len(args.bitcoind_rpc_userpass) > 2:
670 parser.error('a maximum of two arguments are allowed')
# Left-pad with Nones so 0/1/2 positional values map cleanly: with one value
# the username stays None (normalized to '' below) and that value becomes the
# password; with none, both stay None and bitcoin.conf is consulted.
671 args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
# No password given: fall back to bitcoin.conf. The file has no section
# headers, so a fake '[x]' section is prepended to satisfy RawConfigParser,
# and any of rpcuser/rpcpassword/rpcport/port fill in still-unset args.
673 if args.bitcoind_rpc_password is None:
674 conf_path = net.PARENT.CONF_FILE_FUNC()
675 if not os.path.exists(conf_path):
676 parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
677 '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
680 '''rpcpassword=%x\r\n'''
682 '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
683 with open(conf_path, 'rb') as f:
684 cp = ConfigParser.RawConfigParser()
685 cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
686 for conf_name, var_name, var_type in [
687 ('rpcuser', 'bitcoind_rpc_username', str),
688 ('rpcpassword', 'bitcoind_rpc_password', str),
689 ('rpcport', 'bitcoind_rpc_port', int),
690 ('port', 'bitcoind_p2p_port', int),
692 if getattr(args, var_name) is None and cp.has_option('x', conf_name):
693 setattr(args, var_name, var_type(cp.get('x', conf_name)))
694 if args.bitcoind_rpc_password is None:
695 parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
697 if args.bitcoind_rpc_username is None:
698 args.bitcoind_rpc_username = ''
# Remaining ports default from the selected network's constants.
700 if args.bitcoind_rpc_port is None:
701 args.bitcoind_rpc_port = net.PARENT.RPC_PORT
703 if args.bitcoind_p2p_port is None:
704 args.bitcoind_p2p_port = net.PARENT.P2P_PORT
706 if args.p2pool_port is None:
707 args.p2pool_port = net.P2P_PORT
# worker_endpoint is (interface, port): '' interface = all interfaces;
# accepts bare PORT or ADDR:PORT (rsplit so IPv6-ish addresses keep colons).
709 if args.worker_endpoint is None:
710 worker_endpoint = '', net.WORKER_PORT
711 elif ':' not in args.worker_endpoint:
712 worker_endpoint = '', int(args.worker_endpoint)
714 addr, port = args.worker_endpoint.rsplit(':', 1)
715 worker_endpoint = addr, int(port)
717 if args.address is not None:
719 args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
721 parser.error('error parsing address: ' + repr(e))
723 args.pubkey_hash = None
# Merged-mining URLs carry credentials in the netloc ('user:pass@host:port');
# split them into (bare_url, userpass) pairs for later use.
725 def separate_url(url):
726 s = urlparse.urlsplit(url)
727 if '@' not in s.netloc:
728 parser.error('merged url netloc must contain an "@"')
729 userpass, new_netloc = s.netloc.rsplit('@', 1)
730 return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
731 merged_urls = map(separate_url, args.merged_urls)
# Logging: tee stdout/stderr through a timestamping pipe into both the
# original stderr and the logfile (stderr lines get a '> ' prefix). SIGUSR1
# reopens the logfile (for rotation); it is also reopened every 5 seconds.
733 if args.logfile is None:
734 args.logfile = os.path.join(datadir_path, 'log')
736 logfile = logging.LogFile(args.logfile)
737 pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
738 sys.stdout = logging.AbortPipe(pipe)
739 sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
740 if hasattr(signal, "SIGUSR1"):
741 def sigusr1(signum, frame):
742 print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
744 print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
745 signal.signal(signal.SIGUSR1, sigusr1)
746 task.LoopingCall(logfile.reopen).start(5)
# ErrorReporter: Twisted log observer that POSTs error events (rate-limited
# to one per 5 seconds) to the author's collection endpoint; only registered
# when --no-bugreport was not passed.
748 class ErrorReporter(object):
750 self.last_sent = None
752 def emit(self, eventDict):
753 if not eventDict["isError"]:
756 if self.last_sent is not None and time.time() < self.last_sent + 5:
758 self.last_sent = time.time()
760 if 'failure' in eventDict:
761 text = ((eventDict.get('why') or 'Unhandled Error')
762 + '\n' + eventDict['failure'].getTraceback())
764 text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
766 from twisted.web import client
768 url='http://u.forre.st/p2pool_error.cgi',
770 postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
772 ).addBoth(lambda x: None)
773 if not args.no_bugreport:
774 log.addObserver(ErrorReporter().emit)
# Kick off main() once the reactor is running.
776 reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)