3 from __future__ import division
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
24 from bitcoin import worker_interface
25 from util import db, expiring_dict, jsonrpc, variable, deferral, math
26 from . import p2p, skiplists, networks
27 import p2pool, p2pool.data as p2pool_data
# NOTE(review): this chunk is a line-sampled fragment; the leading integers are
# the original file's line numbers and some interior lines are elided, so the
# code is annotated in place rather than restructured.
# Fetch a block template from bitcoind via the getmemorypool RPC (retried up
# to 3 times on failure) and normalize it into a plain dict.
29 @deferral.retry('Error getting work from bitcoind:', 3)
30 @defer.inlineCallbacks
31 def getwork(bitcoind):
32 work = yield bitcoind.rpc_getmemorypool()
# Normalize RPC fields: hex strings become ints / parsed transaction objects.
33 defer.returnValue(dict(
34 version=work['version'],
35 previous_block_hash=int(work['previousblockhash'], 16),
36 transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
37 subsidy=work['coinbasevalue'],
# 'bits' may arrive as a hex string (byte-reversed on decode) or already as an
# int -- both forms are accepted here.
39 target=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
# Build the payout script: ask bitcoind for the 'p2pool' account address and
# its pubkey; retried up to 10 times on RPC failure.
42 @deferral.retry('Error creating payout script:', 10)
43 @defer.inlineCallbacks
44 def get_payout_script2(bitcoind, net):
45 address = yield bitcoind.rpc_getaccountaddress('p2pool')
47 pubkey = (yield bitcoind.rpc_validateaddress(address))['pubkey'].decode('hex')
# Two return paths (the selecting condition is elided in this fragment):
# pay-to-pubkey-hash derived from the address, or pay-to-pubkey from the raw
# pubkey -- presumably chosen by whether the pubkey round-trips; TODO confirm.
50 defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
52 defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))
# Startup sequence of the main coroutine (its `def` line is elided in this
# fragment): verify bitcoind RPC + P2P connectivity, determine the payout
# script, and load cached headers/shares from disk.
54 @defer.inlineCallbacks
57 print 'p2pool (version %s)' % (p2pool.__version__,)
63 print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
66 # connect to bitcoind over JSON-RPC and do initial getmemorypool
67 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
68 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
69 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
# Sanity-check that the daemon we reached is the right network's bitcoind.
70 good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.BITCOIN_RPC_CHECK)(bitcoind)
72 print " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
74 temp_work = yield getwork(bitcoind)
76 print ' Current block hash: %x' % (temp_work['previous_block_hash'],)
79 # connect to bitcoind over bitcoin-p2p
80 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
81 factory = bitcoin_p2p.ClientFactory(net)
82 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
83 yield factory.getProtocol() # waits until handshake is successful
# Payout script: either fetched from bitcoind or computed from the
# user-supplied address (args.pubkey_hash).
87 if args.pubkey_hash is None:
88 print 'Getting payout address from bitcoind...'
89 my_script = yield get_payout_script2(bitcoind, net)
91 print 'Computing payout script from provided address....'
92 my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
94 print ' Payout script:', bitcoin_data.script2_to_human(my_script, net)
97 print 'Loading cached block headers...'
98 ht = bitcoin_p2p.HeightTracker(factory, net.NAME + '_headers.dat')
99 print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
# Rebuild the share tracker from the on-disk ShareStore; 'shared_share_hashes'
# records shares already broadcast so they are not re-sent to peers.
102 tracker = p2pool_data.OkayTracker(net)
103 shared_share_hashes = set()
104 ss = p2pool_data.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_shares.'), net)
105 known_verified = set()
106 print "Loading shares..."
107 for i, (mode, contents) in enumerate(ss.get_shares()):
109 if contents.hash in tracker.shares:
111 shared_share_hashes.add(contents.hash)
# time_seen = 0 marks disk-loaded shares as old, not freshly received.
112 contents.time_seen = 0
113 tracker.add(contents)
114 if len(tracker.shares) % 1000 == 0 and tracker.shares:
115 print " %i" % (len(tracker.shares),)
116 elif mode == 'verified_hash':
117 known_verified.add(contents)
# Unknown store record type -- fail loudly rather than silently skip.
119 raise AssertionError()
120 print " ...inserting %i verified shares..." % (len(known_verified),)
121 for h in known_verified:
# Drop verified-hash records whose share is no longer present in the store.
122 if h not in tracker.shares:
123 ss.forget_verified_share(h)
125 tracker.verified.add(tracker.shares[h])
126 print " ...done loading %i shares!" % (len(tracker.shares),)
# Keep the on-disk store in sync with the in-memory tracker from now on.
128 tracker.added.watch(lambda share: ss.add_share(share))
129 tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
130 tracker.removed.watch(lambda share: ss.forget_share(share.hash))
131 tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
132 tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
134 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
136 # information affecting work that should trigger a long-polling update
137 current_work = variable.Variable(None)
138 # information affecting work that should not trigger a long-polling update
139 current_work2 = variable.Variable(None)
141 work_updated = variable.Event()
# share_hash -> (last request time, request count), expiring after 300s.
143 requested = expiring_dict.ExpiringDict(300)
# Poll bitcoind for fresh work and split it across the two work variables:
# current_work (fields whose change should wake long-polling workers) and
# current_work2 (fields that should not).
145 @defer.inlineCallbacks
146 def set_real_work1():
147 work = yield getwork(bitcoind)
# True when the network tip moved since the last poll (always True on the
# first call, when current_work is still None).
148 changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
149 current_work.set(dict(
150 version=work['version'],
151 previous_block=work['previous_block_hash'],
152 target=work['target'],
# best_share_hash/aux_work are owned by other updaters -- carry them over.
153 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
154 aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
156 current_work2.set(dict(
158 transactions=work['transactions'],
159 subsidy=work['subsidy'],
# clock_offset tracks local-clock drift relative to bitcoind's timestamp.
160 clock_offset=time.time() - work['time'],
161 last_update=time.time(),
# Recompute the best share from the tracker and request any still-missing
# parent shares from peers that are known to have them.
166 def set_real_work2():
167 best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
169 t = dict(current_work.value)
170 t['best_share_hash'] = best
# For each desired-but-missing share, pick a peer to ask for it.
174 for peer2, share_hash in desired:
175 if share_hash not in tracker.tails: # was received in the time tracker.think was running
# Exponential backoff on repeated requests for the same share hash.
# NOTE(review): 't' here looks like it should be a timestamp (see the
# 'requested[share_hash] = t, ...' store below); the line binding it is
# elided in this fragment -- confirm against the full file.
177 last_request_time, count = requested.get(share_hash, (None, 0))
178 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
# Prefer a random connected peer that advertised a head above this share;
# fall back to the peer that told us about it.
180 potential_peers = set()
181 for head in tracker.tails[share_hash]:
182 potential_peers.update(peer_heads.get(head, set()))
183 potential_peers = [peer for peer in potential_peers if peer.connected2]
184 if count == 0 and peer2 is not None and peer2.connected2:
187 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
191 print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
# Stop the remote traversal at our known heads (and one parent above each)
# so the peer does not resend shares we already have.
195 stops=list(set(tracker.heads) | set(
196 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
199 requested[share_hash] = t, count + 1
201 print 'Initializing work...'
202 yield set_real_work1()
# Merged-mining poll loop: repeatedly fetch aux work from the merged daemon
# and fold it into current_work['aux_work'].
207 @defer.inlineCallbacks
208 def set_merged_work():
# No --merged-url given: merged mining disabled (early exit; the return is
# elided in this fragment).
209 if not args.merged_url:
212 merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
213 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
214 x = dict(current_work.value)
215 x['aux_work'] = dict(
216 hash=int(auxblock['hash'], 16),
217 target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
218 chain_id=auxblock['chainid'],
# Poll roughly once per second.
222 yield deferral.sleep(1)
# Pool start time in bitcoind's clock, used for uptime/stats below.
225 start_time = time.time() - current_work2.value['clock_offset']
229 def p2p_shares(shares, peer=None):
231 print 'Processing %i shares...' % (len(shares),)
235 if share.hash in tracker.shares:
236 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
241 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
245 if shares and peer is not None:
246 peer_heads.setdefault(shares[0].hash, set()).add(peer)
252 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
254 @tracker.verified.added.watch
256 if share.pow_hash <= share.header['target']:
257 if factory.conn.value is not None:
258 factory.conn.value.send_block(block=share.as_block(tracker, net))
260 print 'No bitcoind connection! Erp!'
262 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash,)
# Handle advertised share hashes from a peer: request the ones we don't have,
# with the same request-backoff bookkeeping as set_real_work2.
265 def p2p_share_hashes(share_hashes, peer):
268 for share_hash in share_hashes:
# Already known -- nothing to request (continue elided in this fragment).
269 if share_hash in tracker.shares:
271 last_request_time, count = requested.get(share_hash, (None, 0))
# NOTE(review): as in set_real_work2, 't' is presumably a timestamp bound on
# an elided line -- confirm against the full file.
272 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
274 print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
275 get_hashes.append(share_hash)
276 requested[share_hash] = t, count + 1
278 if share_hashes and peer is not None:
279 peer_heads.setdefault(share_hashes[0], set()).add(peer)
281 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
# Serve a peer's getshares request: walk each requested chain up to 'parents'
# ancestors, stopping at any hash in 'stops'.
283 def p2p_get_shares(share_hashes, parents, stops, peer):
# Cap total work: at most ~1000 shares across all requested chains.
284 parents = min(parents, 1000//len(share_hashes))
287 for share_hash in share_hashes:
288 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
289 if share.hash in stops:
292 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
293 peer.sendShares(shares)
295 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
# parse(): turn 'ADDR[:PORT]' into an (addr, port) tuple, defaulting to the
# network's standard P2P port (function header elided in this fragment).
299 ip, port = x.split(':')
302 return x, net.P2P_PORT
# Hard-coded bootstrap node IPs for the initial connection set.
305 ('72.14.191.28', net.P2P_PORT),
306 ('62.204.197.159', net.P2P_PORT),
307 ('142.58.248.28', net.P2P_PORT),
308 ('94.23.34.145', net.P2P_PORT),
# DNS-based bootstrap hosts, resolved at startup; failures are logged and
# ignored so one dead host does not block startup.
312 'dabuttonfactory.com',
315 nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
317 log.err(None, 'Error resolving bootstrap node IP:')
319 if net.NAME == 'litecoin':
320 nodes.add(((yield reactor.resolve('liteco.in')), net.P2P_PORT))
# Construct the p2pool node (constructor call partially elided) and persist
# learned peer addresses in an SQLite-backed dict next to the executable.
323 current_work=current_work,
324 port=args.p2pool_port,
326 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), net.NAME),
327 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
# Wire the protocol callbacks defined above into the node.
329 p2p_node.handle_shares = p2p_shares
330 p2p_node.handle_share_hashes = p2p_share_hashes
331 p2p_node.handle_get_shares = p2p_get_shares
335 # send share when the chain changes to their chain
# Broadcast shares of the newly-best chain that we have not yet announced.
336 def work_changed(new_work):
337 #print 'Work changed:', new_work
338 for share in tracker.get_chain_known(new_work['best_share_hash']):
# Stop at the first already-shared ancestor -- everything below it was
# broadcast previously (break elided in this fragment).
340 if share.hash in shared_share_hashes:
342 shared_share_hashes.add(share.hash)
# Don't echo a share back to the peer it came from.
345 for peer in p2p_node.peers.itervalues():
346 peer.sendShares([share for share in shares if share.peer is not peer])
348 current_work.changed.watch(work_changed)
# UPnP port-forwarding loop (enabled unless --disable-upnp): best-effort,
# retried with randomized exponential sleep; errors only logged.
353 @defer.inlineCallbacks
357 is_lan, lan_ip = yield ipdiscover.get_local_ip()
359 pm = yield portmapper.get_port_mapper()
360 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
361 except defer.TimeoutError:
365 log.err(None, "UPnP error:")
366 yield deferral.sleep(random.expovariate(1/120))
371 # start listening for workers with a JSON-RPC server
373 print 'Listening for workers on port %i...' % (args.worker_port,)
# merkle_root -> (share_info, transactions) for matching returned work back
# to the template it was issued from; entries expire after 300s.
377 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
# Random per-run tag embedded in coinbase nonces so this instance's shares
# can be counted via the skiplist below.
378 run_identifier = struct.pack('<I', random.randrange(2**32))
380 share_counter = skiplists.CountsSkipList(tracker, run_identifier)
381 removed_unstales = set()
# Count our shares in the current best chain vs. stale ones. With doa=True,
# stale shares are further split into dead-on-arrival and other.
382 def get_share_counts(doa=False):
383 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
# Include shares that were in the chain but fell out of the counted window.
384 matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
385 shares_in_chain = my_shares & matching_in_chain
386 stale_shares = my_shares - matching_in_chain
388 stale_doa_shares = stale_shares & doa_shares
389 stale_not_doa_shares = stale_shares - stale_doa_shares
390 return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
391 return len(shares_in_chain) + len(stale_shares), len(stale_shares)
# When one of our in-chain shares is pruned from the verified set, remember
# it so get_share_counts keeps treating it as non-stale.
392 @tracker.verified.removed.watch
394 if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
395 removed_unstales.add(share.hash)
# Map a miner's username to a payout script; returns a script for a valid
# bitcoin address (None/other cases elided in this fragment).
398 def get_payout_script_from_username(request):
399 user = worker_interface.get_username(request)
403 return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
# Build a getwork job for a miner: create the generate (coinbase) tx and
# share_info, remember the merkle-root mapping, and return a BlockAttempt.
407 def compute(request):
408 state = current_work.value
409 payout_script = get_payout_script_from_username(request)
# Fall back to our own script when the username has no address, or with
# probability worker_fee% (this is how the operator fee is charged).
410 if payout_script is None or random.uniform(0, 100) < args.worker_fee:
411 payout_script = my_script
# Refuse to hand out work while not properly synced/connected on PERSIST nets.
412 if state['best_share_hash'] is None and net.PERSIST:
413 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
414 if len(p2p_node.peers) == 0 and net.PERSIST:
415 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
416 if time.time() > current_work2.value['last_update'] + 60:
417 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
# Merged-mining commitment: magic '\xfa\xbemm' + aux hash + size/nonce pair.
419 if state['aux_work'] is not None:
420 aux_str = '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
424 # XXX assuming generate_tx is smallish here..
# Encode our observed stale fraction as a 16-bit value (doubled bytes).
425 def get_stale_frac():
426 shares, stale_shares = get_share_counts()
429 frac = stale_shares/shares
430 return 2*struct.pack('<H', int(65535*frac + .5))
431 subsidy = current_work2.value['subsidy']
# NOTE(review): current_work2['time'] is not among the keys visible in
# set_real_work1 in this fragment -- presumably set on an elided line; confirm.
434 timestamp = current_work2.value['time']
435 previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
436 new_share_info, generate_tx = p2pool_data.new_generate_transaction(
439 previous_share_hash=state['best_share_hash'],
# run_identifier prefix lets share_counter recognize our shares later.
441 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
442 new_script=payout_script,
443 subsidy=subsidy,
# Handle a solved header returned by a miner: match it to its issued
# template, submit to bitcoind (and the merged daemon) when targets are met,
# and record the resulting p2pool share.
470 def got_response(header, request):
472 user = worker_interface.get_username(request)
473 # match up with transactions
474 xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
# Unknown merkle root: the mapping expired or belongs to a previous run.
476 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
478 share_info, transactions = xxx
479 new_share_info = share_info
481 hash_ = bitcoin_data.block_header_type.hash256(header)
483 pow_hash = net.BITCOIN_POW_FUNC(header)
# Submit to bitcoind whenever the bitcoin target is met (always in DEBUG).
485 if pow_hash <= header['target'] or p2pool.DEBUG:
486 if factory.conn.value is not None:
487 factory.conn.value.send_block(block=dict(header=header, txs=transactions))
489 print 'No bitcoind connection! Erp!'
490 if pow_hash <= header['target']:
492 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
# Merged mining: when the aux target is met, build an aux proof-of-work and
# submit it to the merged daemon via getauxblock (best-effort, errors logged).
495 if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
501 merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
506 parent_block_header=header,
# a = aux hash bytes embedded in the coinbase script; b = serialized aux pow.
509 a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
511 merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
513 print "MERGED RESULT:", res
514 merged.rpc_getauxblock(a, b).addBoth(_)
516 log.err(None, 'Error while processing merged mining POW:')
# Reject submissions that don't meet the (easier) p2pool share target.
518 target = new_share_info['target']
519 if pow_hash > target:
520 print 'Worker submitted share with hash > target:\nhash : %x\ntarget: %x' % (pow_hash, target)
522 share = p2pool_data.NewShare(net, header, new_share_info, other_txs=transactions[1:])
523 my_shares.add(share.hash)
# A share built on a stale previous hash is counted as dead-on-arrival.
524 if share.previous_hash != current_work.value['best_share_hash']:
525 doa_shares.add(share.hash)
526 print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
527 good = share.previous_hash == current_work.value['best_share_hash']
528 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
530 # eg. good = share.hash == current_work.value['best_share_hash'] here
533 log.err(None, 'Error processing data received from worker:')
536 web_root = worker_interface.WorkerInterface(compute, got_response, current_work.changed)
# get_rate: JSON pool hashrate estimate, corrected by the median stale
# fraction of the last 120 shares (function header elided in this fragment).
539 if current_work.value['best_share_hash'] is not None:
540 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
541 att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], net, min(height - 1, 720))
542 fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
543 return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
544 return json.dumps(None)
# get_users: JSON mapping of payout script (human form) -> weight share over
# the last <=720 shares.
547 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
548 weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
550 for script in sorted(weights, key=lambda s: weights[s]):
551 res[bitcoin_data.script2_to_human(script, net)] = weights[script]/total_weight
552 return json.dumps(res)
# Minimal twisted.web resource: call func() and serve it with a fixed MIME
# type (render_GET's return line is elided in this fragment).
554 class WebInterface(resource.Resource):
555 def __init__(self, func, mime_type):
556 self.func, self.mime_type = func, mime_type
558 def render_GET(self, request):
559 request.setHeader('Content-Type', self.mime_type)
562 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
563 web_root.putChild('users', WebInterface(get_users, 'application/json'))
564 web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
566 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
568 reactor.listenTCP(args.worker_port, server.Site(web_root))
575 # do new getwork when a block is heard on the p2p interface
def new_block(block_hash):
    """Signal that fresh work is needed.

    Called when a block announcement arrives on the bitcoin P2P connection;
    fires the work_updated event so the polling loop refreshes immediately.
    """
    work_updated.happened()
579 factory.new_block.watch(new_block)
581 print 'Started successfully!'
# Recompute the best share whenever the height tracker learns new headers.
584 ht.updated.watch(set_real_work2)
# Work-refresh loop: poll bitcoind either when a new block is heard
# (work_updated fires) or after a random 1-10s interval, whichever first.
586 @defer.inlineCallbacks
589 flag = work_updated.get_deferred()
591 yield set_real_work1()
594 yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
# Secondary maintenance loop on a ~20s randomized interval.
596 @defer.inlineCallbacks
603 yield deferral.sleep(random.expovariate(1/20))
# Watchdog: arm a 30s SIGALRM re-armed every second; if the reactor stalls
# long enough for the alarm to fire, dump the stack (platforms with SIGALRM only).
609 if hasattr(signal, 'SIGALRM'):
610 def watchdog_handler(signum, frame):
611 print 'Watchdog timer went off at:'
612 traceback.print_stack()
614 signal.signal(signal.SIGALRM, watchdog_handler)
615 task.LoopingCall(signal.alarm, 30).start(1)
# Status loop: every ~3s print connectivity warnings and a pool-stats line
# when it changes.
620 yield deferral.sleep(3)
622 if time.time() > current_work2.value['last_update'] + 60:
623 print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
624 if current_work.value['best_share_hash'] is not None:
625 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
627 att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], net, min(height - 1, 720))
628 weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
629 shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
630 stale_shares = stale_doa_shares + stale_not_doa_shares
631 fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
# NOTE(review): 'str' shadows the builtin here; left as-is (doc-only pass).
632 str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
633 math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
635 len(tracker.verified.shares),
637 weights.get(my_script, 0)/total_weight*100,
638 math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
640 stale_not_doa_shares,
643 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
# Only print when the stats line actually changed.
644 if (str != pool_str):
648 med = math.median(fracs)
649 print 'Median stale proportion:', med
651 print ' Own:', stale_shares/shares
653 print ' Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
# Top-level error handler for the main coroutine.
659 log.err(None, 'Fatal error:')
# ArgumentParser subclass that fixes @-file expansion: file contents are read
# line by line, each line split into multiple arguments, and nested @-files
# are expanded recursively.
664 class FixedArgumentParser(argparse.ArgumentParser):
665 def _read_args_from_files(self, arg_strings):
666 # expand arguments referencing files
668 for arg_string in arg_strings:
670 # for regular arguments, just add them back into the list
671 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
672 new_arg_strings.append(arg_string)
674 # replace arguments referencing files with the file content
677 args_file = open(arg_string[1:])
680 for arg_line in args_file.read().splitlines():
681 for arg in self.convert_arg_line_to_args(arg_line):
682 arg_strings.append(arg)
# Recurse so @-files may themselves reference further @-files.
683 arg_strings = self._read_args_from_files(arg_strings)
684 new_arg_strings.extend(arg_strings)
# File errors are reported through the parser's standard error mechanism.
688 err = sys.exc_info()[1]
691 # return the modified argument list
692 return new_arg_strings
694 def convert_arg_line_to_args(self, arg_line):
695 return [arg for arg in arg_line.split() if arg.strip()]
# Command-line interface definition. fromfile_prefix_chars='@' enables
# reading arguments from files via FixedArgumentParser above.
697 parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
698 parser.add_argument('--version', action='version', version=p2pool.__version__)
699 parser.add_argument('--net',
700 help='use specified network (default: bitcoin)',
701 action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
702 parser.add_argument('--testnet',
703 help='''use the network's testnet''',
704 action='store_const', const=True, default=False, dest='testnet')
705 parser.add_argument('--debug',
706 help='debugging mode',
707 action='store_const', const=True, default=False, dest='debug')
708 parser.add_argument('-a', '--address',
709 help='generate to this address (defaults to requesting one from bitcoind)',
710 type=str, action='store', default=None, dest='address')
711 parser.add_argument('--logfile',
712 help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
713 type=str, action='store', default=None, dest='logfile')
714 parser.add_argument('--merged-url',
715 help='call getauxblock on this url to get work for merged mining',
716 type=str, action='store', default=None, dest='merged_url')
717 parser.add_argument('--merged-userpass',
718 help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
719 type=str, action='store', default=None, dest='merged_userpass')
720 parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
721 help='percentage amount to donate to author of p2pool. Default: 0.5',
722 type=float, action='store', default=0.5, dest='donation_percentage')
# p2pool P2P options; port defaults are filled in per-network after parsing.
724 p2pool_group = parser.add_argument_group('p2pool interface')
725 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
726 help='use TCP port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
727 type=int, action='store', default=None, dest='p2pool_port')
728 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
729 help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to default p2pool P2P port), in addition to builtin addresses',
730 type=str, action='append', default=[], dest='p2pool_nodes')
731 parser.add_argument('--disable-upnp',
732 help='''don't attempt to forward p2pool P2P port from the WAN to this computer using UPnP''',
733 action='store_false', default=True, dest='upnp')
# Worker (miner-facing) JSON-RPC options.
735 worker_group = parser.add_argument_group('worker interface')
736 worker_group.add_argument('-w', '--worker-port', metavar='PORT',
737 help='listen on PORT for RPC connections from miners asking for work and providing responses (default:%s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
738 type=int, action='store', default=None, dest='worker_port')
739 worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
740 help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee . default: 0''',
741 type=float, action='store', default=0, dest='worker_fee')
# bitcoind connection options (RPC for work, P2P for block submission).
743 bitcoind_group = parser.add_argument_group('bitcoind interface')
744 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
745 help='connect to a bitcoind at this address (default: 127.0.0.1)',
746 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
747 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
748 help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getmemorypool (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_RPC_PORT) for _, n in sorted(networks.realnets.items())),
749 type=int, action='store', default=None, dest='bitcoind_rpc_port')
750 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
751 help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_P2P_PORT) for _, n in sorted(networks.realnets.items())),
752 type=int, action='store', default=None, dest='bitcoind_p2p_port')
# Positional RPC credentials; username is optional (nargs='?').
754 bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
755 help='bitcoind RPC interface username (default: empty)',
756 type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
757 bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
758 help='bitcoind RPC interface password',
759 type=str, action='store', dest='bitcoind_rpc_password')
761 args = parser.parse_args()
# Resolve the network object, appending '_testnet' when --testnet was given.
766 net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
768 if args.logfile is None:
769 args.logfile = os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '.log')
# Reopenable log file with size-based truncation: on reopen, files over
# ~100MB keep only their last ~1MB (aligned to a line boundary).
771 class LogFile(object):
772 def __init__(self, filename):
773 self.filename = filename
774 self.inner_file = None
777 if self.inner_file is not None:
778 self.inner_file.close()
# Touch the file so the size check below always has something to open.
779 open(self.filename, 'a').close()
780 f = open(self.filename, 'rb')
781 f.seek(0, os.SEEK_END)
783 if length > 100*1000*1000:
784 f.seek(-1000*1000, os.SEEK_END)
# Scan forward to a newline so the kept tail starts on a whole line.
786 if f.read(1) in ('', '\n'):
790 f = open(self.filename, 'wb')
793 self.inner_file = open(self.filename, 'a')
794 def write(self, data):
795 self.inner_file.write(data)
797 self.inner_file.flush()
# Fan-out writer: duplicates writes (and flushes) to several file objects.
798 class TeePipe(object):
799 def __init__(self, outputs):
800 self.outputs = outputs
801 def write(self, data):
802 for output in self.outputs:
805 for output in self.outputs:
# Prefixes every complete line with a HH:MM:SS.ffffff timestamp; partial
# lines are buffered in self.buf until their newline arrives.
807 class TimestampingPipe(object):
808 def __init__(self, inner_file):
809 self.inner_file = inner_file
812 def write(self, data):
813 buf = self.buf + data
814 lines = buf.split('\n')
815 for line in lines[:-1]:
816 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
817 self.inner_file.flush()
# Route stdout, stderr and Twisted's log through the timestamped tee so all
# output lands on the console and in the logfile.
821 logfile = LogFile(args.logfile)
822 sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
# SIGUSR1 reopens the logfile (for logrotate-style rotation); it is also
# reopened every 5s to apply the size cap.
823 if hasattr(signal, "SIGUSR1"):
824 def sigusr1(signum, frame):
825 print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
827 print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
828 signal.signal(signal.SIGUSR1, sigusr1)
829 task.LoopingCall(logfile.reopen).start(5)
# Fill in per-network defaults for any port the user did not specify.
831 if args.bitcoind_rpc_port is None:
832 args.bitcoind_rpc_port = net.BITCOIN_RPC_PORT
834 if args.bitcoind_p2p_port is None:
835 args.bitcoind_p2p_port = net.BITCOIN_P2P_PORT
837 if args.p2pool_port is None:
838 args.p2pool_port = net.P2P_PORT
840 if args.worker_port is None:
841 args.worker_port = net.WORKER_PORT
# Validate --address up front so a bad address fails before startup.
843 if args.address is not None:
845 args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net)
847 parser.error('error parsing address: ' + repr(e))
849 args.pubkey_hash = None
# --merged-url and --merged-userpass must be given together (XOR check).
851 if (args.merged_url is None) ^ (args.merged_userpass is None):
852 parser.error('must specify --merged-url and --merged-userpass')
854 reactor.callWhenRunning(main, args, net)