4 from __future__ import division
19 from twisted.internet import defer, reactor, task
20 from twisted.web import server, resource
21 from twisted.python import log
22 from nattraverso import portmapper, ipdiscover
24 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
25 from bitcoin import worker_interface
26 from util import db, expiring_dict, jsonrpc, variable, deferral, math
27 from . import p2p, skiplists, networks
28 import p2pool, p2pool.data as p2pool_data
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch a block template from bitcoind via the getmemorypool RPC.

    Fires (via defer.returnValue) a dict holding the template's version,
    previous block hash (as an int), decoded transactions, coinbase
    subsidy, and bitcoin block target. Retried up to 3 times on error.
    """
    work = yield bitcoind.rpc_getmemorypool()
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        # transactions arrive hex-encoded; unpack into structured objects
        transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
        subsidy=work['coinbasevalue'],
        # NOTE(review): this excerpt is missing at least one entry here (a
        # 'time' key is read from this result elsewhere) as well as the
        # closing parentheses of this call -- confirm against full source.
        # 'bits' may be a hex string (older bitcoind) or already an int.
        target=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
43 @deferral.retry('Error creating payout script:', 10)
44 @defer.inlineCallbacks
45 def get_payout_script2(bitcoind, net):
46 address = yield bitcoind.rpc_getaccountaddress('p2pool')
47 validate_response = yield bitcoind.rpc_validateaddress(address)
48 if 'pubkey' not in validate_response:
49 print ' Pubkey request failed. Falling back to payout to address.'
50 defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
51 pubkey = validate_response['pubkey'].decode('hex')
52 defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))
@defer.inlineCallbacks
# NOTE(review): the 'def main(args, net):' header and many intervening
# lines are missing from this excerpt; indentation is reconstructed.
print 'p2pool (version %s)' % (p2pool.__version__,)
print "Install Pygame and PIL to enable visualizations! Visualizations disabled."

# connect to bitcoind over JSON-RPC and do initial getmemorypool
url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
# verify we're talking to the right daemon/network (retries every second)
good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.BITCOIN_RPC_CHECK)(bitcoind)
print " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
temp_work = yield getwork(bitcoind)
print ' Current block hash: %x' % (temp_work['previous_block_hash'],)

# connect to bitcoind over bitcoin-p2p
print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
factory = bitcoin_p2p.ClientFactory(net)
reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
yield factory.getProtocol() # waits until handshake is successful

# determine where generated coins will be paid
if args.pubkey_hash is None:
    print 'Getting payout address from bitcoind...'
    my_script = yield get_payout_script2(bitcoind, net)
# presumably the next two lines are the 'else:' branch -- confirm
print 'Computing payout script from provided address....'
my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
print ' Payout script:', bitcoin_data.script2_to_human(my_script, net)

# load block headers cached from a previous run of this network
print 'Loading cached block headers...'
ht = bitcoin_p2p.HeightTracker(factory, net.NAME + '_headers.dat')
print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
# NOTE(review): lines are missing from this excerpt; structure of the
# share-loading loop below is partially reconstructed.
tracker = p2pool_data.OkayTracker(net)
shared_share_hashes = set()  # hashes of shares we've already relayed to peers
ss = p2pool_data.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_shares.'), net)
known_verified = set()
print "Loading shares..."
for i, (mode, contents) in enumerate(ss.get_shares()):
    # (the "if mode == 'share':" branch header is missing here)
    if contents.hash in tracker.shares:
    shared_share_hashes.add(contents.hash)
    contents.time_seen = 0  # loaded from disk, so the seen-time is unknown
    tracker.add(contents)
    if len(tracker.shares) % 1000 == 0 and tracker.shares:
        print " %i" % (len(tracker.shares),)
    elif mode == 'verified_hash':
        known_verified.add(contents)
    raise AssertionError()  # unknown record type in the share store
print " ...inserting %i verified shares..." % (len(known_verified),)
for h in known_verified:
    if h not in tracker.shares:
        # verified hash without a matching share -- drop the stale record
        ss.forget_verified_share(h)
    tracker.verified.add(tracker.shares[h])
print " ...done loading %i shares!" % (len(tracker.shares),)

# keep the on-disk share store in sync with the in-memory tracker
tracker.added.watch(lambda share: ss.add_share(share))
tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
tracker.removed.watch(lambda share: ss.forget_share(share.hash))
tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))

peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it

# information affecting work that should trigger a long-polling update
current_work = variable.Variable(None)
# information affecting work that should not trigger a long-polling update
current_work2 = variable.Variable(None)

work_updated = variable.Event()

requested = expiring_dict.ExpiringDict(300)  # share_hash -> (last request time, count)
@defer.inlineCallbacks
def set_real_work1():
    # Poll bitcoind for a fresh block template and publish it into the
    # two work variables: current_work wakes long-polling miners,
    # current_work2 holds data that should not.
    work = yield getwork(bitcoind)
    # a changed previous-block hash means the bitcoin network advanced
    changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
    current_work.set(dict(
        version=work['version'],
        previous_block=work['previous_block_hash'],
        target=work['target'],
        # carry p2pool-side state forward; getwork knows nothing of it
        best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
        aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
    # NOTE(review): the closing parens of the call above and parts of the
    # one below are missing from this excerpt.
    current_work2.set(dict(
        transactions=work['transactions'],
        subsidy=work['subsidy'],
        # offset between our clock and bitcoind's template timestamp
        clock_offset=time.time() - work['time'],
        last_update=time.time(),
def set_real_work2():
    # Re-evaluate the best share chain and request any parent shares we
    # are missing, with per-hash exponential backoff.
    best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
    t = dict(current_work.value)
    t['best_share_hash'] = best
    # NOTE(review): lines are missing in this loop; in particular 't' is
    # compared as a timestamp below, so a reassignment of it (and several
    # guard/continue lines) must exist in the full source -- confirm.
    for peer2, share_hash in desired:
        if share_hash not in tracker.tails: # was received in the time tracker.think was running
        last_request_time, count = requested.get(share_hash, (None, 0))
        if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
        # collect connected peers known to have a chain containing this share
        potential_peers = set()
        for head in tracker.tails[share_hash]:
            potential_peers.update(peer_heads.get(head, set()))
        potential_peers = [peer for peer in potential_peers if peer.connected2]
        if count == 0 and peer2 is not None and peer2.connected2:
        # usually ask a random knowing peer; sometimes the advertiser itself
        peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
        print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
        # stop points: current heads and points slightly below each head
        stops=list(set(tracker.heads) | set(
            tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
        requested[share_hash] = t, count + 1

print 'Initializing work...'
yield set_real_work1()
@defer.inlineCallbacks
def set_merged_work():
    # Poll the merged-mining daemon (when configured) and publish its
    # aux-chain work into current_work, about once a second.
    if not args.merged_url:
    merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
    auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
    x = dict(current_work.value)
    x['aux_work'] = dict(
        hash=int(auxblock['hash'], 16),
        target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
        chain_id=auxblock['chainid'],
    # NOTE(review): closing parens and the current_work.set(...) call are
    # missing from this excerpt -- confirm against the full source.
    yield deferral.sleep(1)
# start of run, measured in bitcoind's clock
start_time = time.time() - current_work2.value['clock_offset']

# setup p2p logic and join p2pool network
def p2p_shares(shares, peer=None):
    # Process a batch of shares received from the network (peer is None
    # when the shares did not come from a remote peer).
    # NOTE(review): the per-share loop header and several statements are
    # missing from this excerpt.
    print 'Processing %i shares...' % (len(shares),)
    if share.hash in tracker.shares:
        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
    if shares and peer is not None:
        # remember that this peer knows about this chain head
        peer_heads.setdefault(shares[0].hash, set()).add(peer)
    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)

@tracker.verified.added.watch
# NOTE(review): the watcher's 'def _(share):' line is missing here
if share.pow_hash <= share.header['target']:
    # this share's proof-of-work also satisfies the bitcoin block target
    if factory.conn.value is not None:
        factory.conn.value.send_block(block=share.as_block(tracker, net))
    print 'No bitcoind connection! Erp!'
    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash,)
def p2p_share_hashes(share_hashes, peer):
    # A peer advertised share hashes; request the ones we don't have,
    # with per-hash exponential backoff via the 'requested' dict.
    # NOTE(review): several lines are missing here; 't' is used as a
    # timestamp and 'get_hashes' as a list, both assigned on missing lines.
    for share_hash in share_hashes:
        if share_hash in tracker.shares:
        last_request_time, count = requested.get(share_hash, (None, 0))
        if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
        print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
        get_hashes.append(share_hash)
        requested[share_hash] = t, count + 1
    if share_hashes and peer is not None:
        # remember this peer as a source for this chain head
        peer_heads.setdefault(share_hashes[0], set()).add(peer)
    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])

def p2p_get_shares(share_hashes, parents, stops, peer):
    # Serve a peer's getshares request; cap the total shares sent by
    # limiting parents to ~1000 across all requested hashes.
    parents = min(parents, 1000//len(share_hashes))
    for share_hash in share_hashes:
        for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
            if share.hash in stops:
    print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
    peer.sendShares(shares)
print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)

# NOTE(review): this excerpt is heavily truncated -- the 'parse' helper's
# def line, the bootstrap-node set construction, and the p2p.Node(...)
# constructor call are all partially missing below.
# parse ADDR[:PORT]; when PORT is absent, default to the network p2p port
ip, port = x.split(':')
return x, net.P2P_PORT

# hard-coded bootstrap peer addresses
('72.14.191.28', net.P2P_PORT),
('62.204.197.159', net.P2P_PORT),
('142.58.248.28', net.P2P_PORT),
('94.23.34.145', net.P2P_PORT),
'dabuttonfactory.com',
nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
log.err(None, 'Error resolving bootstrap node IP:')
if net.NAME == 'litecoin':
    nodes.add(((yield reactor.resolve('liteco.in')), net.P2P_PORT))
current_work=current_work,
port=args.p2pool_port,
# persistent peer-address store, shared across networks via the NAME key
addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), net.NAME),
preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,

# wire the p2p node's callbacks to the handlers defined above
p2p_node.handle_shares = p2p_shares
p2p_node.handle_share_hashes = p2p_share_hashes
p2p_node.handle_get_shares = p2p_get_shares
# send share when the chain changes to their chain
def work_changed(new_work):
    #print 'Work changed:', new_work
    # Walk the new best chain and broadcast any shares peers have not
    # seen yet (tracked in shared_share_hashes).
    # NOTE(review): lines are missing here, including the assignment of
    # the 'shares' list sent below.
    for share in tracker.get_chain_known(new_work['best_share_hash']):
        if share.hash in shared_share_hashes:
        shared_share_hashes.add(share.hash)
    for peer in p2p_node.peers.itervalues():
        # don't echo a share back to the peer that gave it to us
        peer.sendShares([share for share in shares if share.peer is not peer])
current_work.changed.watch(work_changed)
@defer.inlineCallbacks
# NOTE(review): this is the UPnP port-forwarding loop; its def line,
# try blocks, and loop header are missing from this excerpt.
is_lan, lan_ip = yield ipdiscover.get_local_ip()
pm = yield portmapper.get_port_mapper()
yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
except defer.TimeoutError:
log.err(None, "UPnP error:")
# retry with exponential inter-arrival, mean ~120 seconds
yield deferral.sleep(random.expovariate(1/120))

# start listening for workers with a JSON-RPC server
print 'Listening for workers on port %i...' % (args.worker_port,)

# merkle_root -> (share_info, transactions) for work we handed to miners
merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
run_identifier = struct.pack('<I', random.randrange(2**32))  # tags this session's shares
share_counter = skiplists.CountsSkipList(tracker, run_identifier)
removed_unstales = set()  # our shares evicted from tracker while still on best chain
def get_share_counts(doa=False):
    # Count our shares on/off the current best chain. Returns
    # (total, stale_doa, stale_not_doa) when doa is true, else (total, stale).
    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
    matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
    shares_in_chain = my_shares & matching_in_chain
    stale_shares = my_shares - matching_in_chain
    # NOTE(review): the 'if doa:' guard for the next four lines is missing
    # from this excerpt -- only one of the two returns executes.
    stale_doa_shares = stale_shares & doa_shares
    stale_not_doa_shares = stale_shares - stale_doa_shares
    return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
    return len(shares_in_chain) + len(stale_shares), len(stale_shares)
@tracker.verified.removed.watch
# NOTE(review): the watcher's 'def _(share):' line is missing here
if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
    # one of our shares fell off the tracker while still on the best chain
    removed_unstales.add(share.hash)

def get_payout_script_from_username(request):
    # Map the miner's username (expected to be a coin address) to a
    # payout script; intermediate validation lines are missing here.
    user = worker_interface.get_username(request)
    return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
def compute(request):
    # Build a getwork-style BlockAttempt for a miner from current p2pool
    # state; raises jsonrpc.Error when work can't safely be handed out.
    # NOTE(review): several lines are missing from this excerpt.
    state = current_work.value
    payout_script = get_payout_script_from_username(request)
    if payout_script is None or random.uniform(0, 100) < args.worker_fee:
        payout_script = my_script  # operator's script (fee hit or fallback)
    if state['best_share_hash'] is None and net.PERSIST:
        raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
    if len(p2p_node.peers) == 0 and net.PERSIST:
        raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
    if time.time() > current_work2.value['last_update'] + 60:
        raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
    if state['aux_work'] is not None:
        # merged-mining marker embedded in the coinbase scriptSig
        aux_str = '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
    # XXX assuming generate_tx is smallish here..
    def get_stale_frac():
        # encode the recent stale fraction as a repeated little-endian uint16
        shares, stale_shares = get_share_counts()
        # (a zero-shares guard appears to be missing from this excerpt)
        frac = stale_shares/shares
        return 2*struct.pack('<H', int(65535*frac + .5))
    subsidy = current_work2.value['subsidy']
    timestamp = current_work2.value['time']
    previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
    share_info, generate_tx = p2pool_data.generate_transaction(
        previous_share_hash=state['best_share_hash'],
        # unique per-request nonce, prefixed with this session's identifier
        nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
        new_script=payout_script,
        donation=math.perfect_round(65535*args.donation_percentage/100),
        # stale fraction byte: 255 sentinel when we have no shares yet
        stale_frac=(lambda shares, stales:
            255 if shares == 0 else math.perfect_round(254*stales/shares)
        )(*get_share_counts()),
        block_target=state['target'],
        desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
    print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin_data.target_to_difficulty(share_info['target']), (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) -subsidy//200)*1e-8, net.BITCOIN_SYMBOL, subsidy*1e-8, net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
    #print 'Target: %x' % (p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
    #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
    transactions = [generate_tx] + list(current_work2.value['transactions'])
    merkle_root = bitcoin_data.merkle_hash(transactions)
    # remember the txs so got_response can reconstruct the block later
    merkle_root_to_transactions[merkle_root] = share_info, transactions
    target2 = share_info['target']
    times[merkle_root] = time.time()
    #print 'SENT', 2**256//p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
    return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2), state['best_share_hash']
def got_response(header, request):
    # Handle a solved header submitted by a miner: submit to bitcoind if
    # it meets the block target, to the merged daemon if it meets the
    # aux target, and into the share chain if it meets the share target.
    # NOTE(review): many lines (try/except, guards, dict literals) are
    # missing from this excerpt.
    user = worker_interface.get_username(request)
    # match up with transactions
    xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
    share_info, transactions = xxx

    hash_ = bitcoin_data.block_header_type.hash256(header)
    pow_hash = net.BITCOIN_POW_FUNC(header)

    if pow_hash <= header['target'] or p2pool.DEBUG:
        # satisfies the bitcoin target (or debugging) -- submit the block
        if factory.conn.value is not None:
            factory.conn.value.send_block(block=dict(header=header, txs=transactions))
        print 'No bitcoind connection! Erp!'
        if pow_hash <= header['target']:
            print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)

    if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
        # build the aux proof-of-work and hand it to the merged daemon
        merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
        parent_block_header=header,
        # aux hash is embedded near the end of the coinbase scriptSig
        a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
        merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
        print "MERGED RESULT:", res
        merged.rpc_getauxblock(a, b).addBoth(_)
        log.err(None, 'Error while processing merged mining POW:')

    target = share_info['target']
    if pow_hash > target:
        print 'Worker submitted share with hash > target:\nhash : %x\ntarget: %x' % (pow_hash, target)
    share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
    my_shares.add(share.hash)
    if share.previous_hash != current_work.value['best_share_hash']:
        doa_shares.add(share.hash)  # dead-on-arrival: built on a stale tip
    print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
    good = share.previous_hash == current_work.value['best_share_hash']
    # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
    # eg. good = share.hash == current_work.value['best_share_hash'] here
    log.err(None, 'Error processing data received from worker:')
web_root = worker_interface.WorkerInterface(compute, got_response, current_work.changed)

# NOTE(review): the 'def get_rate():' header is missing from this excerpt
if current_work.value['best_share_hash'] is not None:
    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
    att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
    # correct the raw rate by the pool's recent median stale fraction
    fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
    return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
return json.dumps(None)

# NOTE(review): the 'def get_users():' header and the initialization of
# 'res' are missing from this excerpt
height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
for script in sorted(weights, key=lambda s: weights[s]):
    # each payout script's fraction of the current payout weights
    res[bitcoin_data.script2_to_human(script, net)] = weights[script]/total_weight
return json.dumps(res)

class WebInterface(resource.Resource):
    # Minimal twisted resource serving func() under a fixed MIME type.
    def __init__(self, func, mime_type):
        self.func, self.mime_type = func, mime_type
    def render_GET(self, request):
        # (the 'return self.func()' line is missing from this excerpt)
        request.setHeader('Content-Type', self.mime_type)
# NOTE(review): this section is heavily truncated -- loop headers,
# try/except lines, and several statements are missing below.
web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
web_root.putChild('users', WebInterface(get_users, 'application/json'))
web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))

reactor.listenTCP(args.worker_port, server.Site(web_root))

# do new getwork when a block is heard on the p2p interface
def new_block(block_hash):
    work_updated.happened()
factory.new_block.watch(new_block)

print 'Started successfully!'

ht.updated.watch(set_real_work2)

@defer.inlineCallbacks
# NOTE(review): header of the bitcoind polling loop is missing here
flag = work_updated.get_deferred()
yield set_real_work1()
# wake on a new-block event or after a random 1-10s poll interval
yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)

@defer.inlineCallbacks
yield deferral.sleep(random.expovariate(1/20))

if hasattr(signal, 'SIGALRM'):
    # watchdog: dump a stack trace if the reactor stalls for 30 seconds
    def watchdog_handler(signum, frame):
        print 'Watchdog timer went off at:'
        traceback.print_stack()
    signal.signal(signal.SIGALRM, watchdog_handler)
    task.LoopingCall(signal.alarm, 30).start(1)

# periodic status display
yield deferral.sleep(3)
if time.time() > current_work2.value['last_update'] + 60:
    print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
if current_work.value['best_share_hash'] is not None:
    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
    att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
    weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
    shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
    stale_shares = stale_doa_shares + stale_not_doa_shares
    fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
    # (several of this format's arguments are missing from this excerpt)
    str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
        math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
        len(tracker.verified.shares),
        weights.get(my_script, 0)/total_weight*100,
        math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
        stale_not_doa_shares,
    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
    if (str != pool_str):
    med = math.median(fracs)
    print 'Pool stales: %i%%' % (int(100*med+.5),),
    # confidence interval on our own stale rate / efficiency
    print 'Own:', '%i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius(math.binomial_conf_interval(stale_shares, shares, conf))),
    print 'Own efficiency:', '%i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius((1 - y)/(1 - med) for y in math.binomial_conf_interval(stale_shares, shares, conf)[::-1])),
    print '(%i%% confidence)' % (int(100*conf+.5),),
log.err(None, 'Fatal error:')
class FixedArgumentParser(argparse.ArgumentParser):
    """ArgumentParser whose @file support expands files recursively and
    splits each line into multiple arguments via convert_arg_line_to_args.
    """
    def _read_args_from_files(self, arg_strings):
        # expand arguments referencing files
        # NOTE(review): the initialization of new_arg_strings, the else
        # branch, and the try/except around the file read are missing
        # from this excerpt.
        for arg_string in arg_strings:
            # for regular arguments, just add them back into the list
            if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                new_arg_strings.append(arg_string)
            # replace arguments referencing files with the file content
            args_file = open(arg_string[1:])
            for arg_line in args_file.read().splitlines():
                for arg in self.convert_arg_line_to_args(arg_line):
                    arg_strings.append(arg)
            # recurse so included files may themselves contain @file refs
            arg_strings = self._read_args_from_files(arg_strings)
            new_arg_strings.extend(arg_strings)
        err = sys.exc_info()[1]
        # return the modified argument list
        return new_arg_strings

    def convert_arg_line_to_args(self, arg_line):
        # split each file line on whitespace, dropping empty tokens
        return [arg for arg in arg_line.split() if arg.strip()]
# command-line interface; '@file' pulls additional arguments from a file
parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
parser.add_argument('--version', action='version', version=p2pool.__version__)
parser.add_argument('--net',
    help='use specified network (default: bitcoin)',
    action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
parser.add_argument('--testnet',
    help='''use the network's testnet''',
    action='store_const', const=True, default=False, dest='testnet')
parser.add_argument('--debug',
    help='debugging mode',
    action='store_const', const=True, default=False, dest='debug')
parser.add_argument('-a', '--address',
    help='generate to this address (defaults to requesting one from bitcoind)',
    type=str, action='store', default=None, dest='address')
parser.add_argument('--logfile',
    help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
    type=str, action='store', default=None, dest='logfile')
parser.add_argument('--merged-url',
    help='call getauxblock on this url to get work for merged mining',
    type=str, action='store', default=None, dest='merged_url')
parser.add_argument('--merged-userpass',
    help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
    type=str, action='store', default=None, dest='merged_userpass')
parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
    help='percentage amount to donate to author of p2pool. Default: 0.5',
    type=float, action='store', default=0.5, dest='donation_percentage')

p2pool_group = parser.add_argument_group('p2pool interface')
p2pool_group.add_argument('--p2pool-port', metavar='PORT',
    help='use TCP port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='p2pool_port')
p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
    help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to default p2pool P2P port), in addition to builtin addresses',
    type=str, action='append', default=[], dest='p2pool_nodes')
parser.add_argument('--disable-upnp',
    help='''don't attempt to forward p2pool P2P port from the WAN to this computer using UPnP''',
    action='store_false', default=True, dest='upnp')

worker_group = parser.add_argument_group('worker interface')
worker_group.add_argument('-w', '--worker-port', metavar='PORT',
    help='listen on PORT for RPC connections from miners asking for work and providing responses (default:%s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='worker_port')
worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
    help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee . default: 0''',
    type=float, action='store', default=0, dest='worker_fee')

bitcoind_group = parser.add_argument_group('bitcoind interface')
bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
    help='connect to a bitcoind at this address (default: 127.0.0.1)',
    type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
    help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getmemorypool (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_RPC_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='bitcoind_rpc_port')
bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
    help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_P2P_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='bitcoind_p2p_port')

# positional RPC credentials: username optional, password required
bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
    help='bitcoind RPC interface username (default: empty)',
    type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
    help='bitcoind RPC interface password',
    type=str, action='store', dest='bitcoind_rpc_password')

args = parser.parse_args()

# resolve the concrete network object (e.g. 'bitcoin' / 'bitcoin_testnet')
net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]

if args.logfile is None:
    # default log file lives next to the launcher script
    args.logfile = os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '.log')
class LogFile(object):
    # File-like sink that appends to 'filename' and supports periodic
    # reopening (for rotation); trims the file once it exceeds ~100MB.
    def __init__(self, filename):
        self.filename = filename
        self.inner_file = None
    # NOTE(review): the 'def reopen(self):' header and several statements
    # of the trimming logic are missing from this excerpt.
        if self.inner_file is not None:
            self.inner_file.close()
        open(self.filename, 'a').close()  # make sure the file exists
        f = open(self.filename, 'rb')
        f.seek(0, os.SEEK_END)
        if length > 100*1000*1000:
            # keep roughly the last 1MB, starting at a line boundary
            f.seek(-1000*1000, os.SEEK_END)
            if f.read(1) in ('', '\n'):
        f = open(self.filename, 'wb')
        self.inner_file = open(self.filename, 'a')
    def write(self, data):
        self.inner_file.write(data)
        self.inner_file.flush()

class TeePipe(object):
    # File-like object that fans every write out to several outputs.
    def __init__(self, outputs):
        self.outputs = outputs
    def write(self, data):
        for output in self.outputs:
    # NOTE(review): the write/flush bodies' inner lines are missing here
        for output in self.outputs:

class TimestampingPipe(object):
    # Wraps a file-like object, prefixing each completed line with a
    # wall-clock timestamp; partial lines are buffered in self.buf.
    def __init__(self, inner_file):
        self.inner_file = inner_file
    def write(self, data):
        buf = self.buf + data  # prepend any unterminated partial line
        lines = buf.split('\n')
        for line in lines[:-1]:
            self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
            self.inner_file.flush()
logfile = LogFile(args.logfile)
# mirror stdout, stderr and twisted's log to both the console and the
# logfile, timestamping every line
sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
if hasattr(signal, "SIGUSR1"):
    # SIGUSR1 reopens the log file, for external log-rotation tools
    def sigusr1(signum, frame):
        print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
        # (the logfile.reopen() call is missing from this excerpt)
        print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
    signal.signal(signal.SIGUSR1, sigusr1)
task.LoopingCall(logfile.reopen).start(5)

# fill in network-dependent defaults for any ports not given explicitly
if args.bitcoind_rpc_port is None:
    args.bitcoind_rpc_port = net.BITCOIN_RPC_PORT

if args.bitcoind_p2p_port is None:
    args.bitcoind_p2p_port = net.BITCOIN_P2P_PORT

if args.p2pool_port is None:
    args.p2pool_port = net.P2P_PORT

if args.worker_port is None:
    args.worker_port = net.WORKER_PORT

if args.address is not None:
    # (the try: line of this try/except is missing from this excerpt)
    args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net)
    parser.error('error parsing address: ' + repr(e))
args.pubkey_hash = None

# merged mining needs both the url and the credentials, or neither
if (args.merged_url is None) ^ (args.merged_userpass is None):
    parser.error('must specify --merged-url and --merged-userpass')

reactor.callWhenRunning(main, args, net)