4 from __future__ import division
19 from twisted.internet import defer, reactor, task
20 from twisted.web import server, resource
21 from twisted.python import log
22 from nattraverso import portmapper, ipdiscover
24 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
25 from bitcoin import worker_interface
26 from util import expiring_dict, jsonrpc, variable, deferral, math
27 from . import p2p, skiplists, networks
28 import p2pool, p2pool.data as p2pool_data
30 @deferral.retry('Error getting work from bitcoind:', 3)
31 @defer.inlineCallbacks
32 def getwork(bitcoind):
33 work = yield bitcoind.rpc_getmemorypool()
34 defer.returnValue(dict(
35 version=work['version'],
36 previous_block_hash=int(work['previousblockhash'], 16),
37 transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
38 subsidy=work['coinbasevalue'],
40 target=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Obtain a payout script from the local bitcoind wallet.

    Asks bitcoind for the address of the 'p2pool' account, then tries to
    fetch the raw public key for it via validateaddress so payouts can use
    a pay-to-pubkey style script (bitcoin_data.pubkey_to_script2). If the
    pubkey is unavailable, falls back to a script built from the address's
    pubkey hash. Retried up to 10 times on failure (see @deferral.retry).

    Returns (via defer.returnValue): the payout script as produced by
    bitcoin_data.pubkey_to_script2 or bitcoin_data.pubkey_hash_to_script2.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    validate_response = yield bitcoind.rpc_validateaddress(address)
    if 'pubkey' not in validate_response:
        # validateaddress only reports 'pubkey' for wallet-owned addresses;
        # without it, pay to the address's pubkey hash instead.
        print ' Pubkey request failed. Falling back to payout to address.'
        defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
    pubkey = validate_response['pubkey'].decode('hex')
    defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))
54 @defer.inlineCallbacks
55 def main(args, net, datadir_path):
57 print 'p2pool (version %s)' % (p2pool.__version__,)
63 print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
66 # connect to bitcoind over JSON-RPC and do initial getmemorypool
67 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
68 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
69 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
70 good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.BITCOIN_RPC_CHECK)(bitcoind)
72 print " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
74 temp_work = yield getwork(bitcoind)
76 print ' Current block hash: %x' % (temp_work['previous_block_hash'],)
79 # connect to bitcoind over bitcoin-p2p
80 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
81 factory = bitcoin_p2p.ClientFactory(net)
82 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
83 yield factory.getProtocol() # waits until handshake is successful
87 if args.pubkey_hash is None:
88 print 'Getting payout address from bitcoind...'
89 my_script = yield get_payout_script2(bitcoind, net)
91 print 'Computing payout script from provided address....'
92 my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
94 print ' Payout script:', bitcoin_data.script2_to_human(my_script, net)
97 ht = bitcoin_p2p.HeightTracker(bitcoind, factory)
99 tracker = p2pool_data.OkayTracker(net)
100 shared_share_hashes = set()
101 ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
102 known_verified = set()
103 print "Loading shares..."
104 for i, (mode, contents) in enumerate(ss.get_shares()):
106 if contents.hash in tracker.shares:
108 shared_share_hashes.add(contents.hash)
109 contents.time_seen = 0
110 tracker.add(contents)
111 if len(tracker.shares) % 1000 == 0 and tracker.shares:
112 print " %i" % (len(tracker.shares),)
113 elif mode == 'verified_hash':
114 known_verified.add(contents)
116 raise AssertionError()
117 print " ...inserting %i verified shares..." % (len(known_verified),)
118 for h in known_verified:
119 if h not in tracker.shares:
120 ss.forget_verified_share(h)
122 tracker.verified.add(tracker.shares[h])
123 print " ...done loading %i shares!" % (len(tracker.shares),)
125 tracker.removed.watch(lambda share: ss.forget_share(share.hash))
126 tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
127 tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
129 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
131 pre_current_work = variable.Variable(None)
132 pre_current_work2 = variable.Variable(None)
133 pre_merged_work = variable.Variable(None)
134 # information affecting work that should trigger a long-polling update
135 current_work = variable.Variable(None)
136 # information affecting work that should not trigger a long-polling update
137 current_work2 = variable.Variable(None)
139 work_updated = variable.Event()
141 requested = expiring_dict.ExpiringDict(300)
143 @defer.inlineCallbacks
144 def set_real_work1():
145 work = yield getwork(bitcoind)
146 pre_current_work2.set(dict(
148 transactions=work['transactions'],
149 subsidy=work['subsidy'],
150 clock_offset=time.time() - work['time'],
151 last_update=time.time(),
152 )) # second set first because everything hooks on the first
153 pre_current_work.set(dict(
154 version=work['version'],
155 previous_block=work['previous_block_hash'],
156 target=work['target'],
159 def set_real_work2():
160 best, desired = tracker.think(ht, pre_current_work.value['previous_block'], time.time() - pre_current_work2.value['clock_offset'])
162 current_work2.set(pre_current_work2.value)
163 t = dict(pre_current_work.value)
164 t['best_share_hash'] = best
165 t['aux_work'] = pre_merged_work.value
169 for peer2, share_hash in desired:
170 if share_hash not in tracker.tails: # was received in the time tracker.think was running
172 last_request_time, count = requested.get(share_hash, (None, 0))
173 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
175 potential_peers = set()
176 for head in tracker.tails[share_hash]:
177 potential_peers.update(peer_heads.get(head, set()))
178 potential_peers = [peer for peer in potential_peers if peer.connected2]
179 if count == 0 and peer2 is not None and peer2.connected2:
182 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
186 print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
190 stops=list(set(tracker.heads) | set(
191 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
194 requested[share_hash] = t, count + 1
195 pre_current_work.changed.watch(lambda _: set_real_work2())
197 print 'Initializing work...'
198 yield set_real_work1()
202 pre_merged_work.changed.watch(lambda _: set_real_work2())
203 ht.updated.watch(set_real_work2)
205 @defer.inlineCallbacks
206 def set_merged_work():
207 if not args.merged_url:
209 merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
211 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
212 pre_merged_work.set(dict(
213 hash=int(auxblock['hash'], 16),
214 target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
215 chain_id=auxblock['chainid'],
217 yield deferral.sleep(1)
220 start_time = time.time() - current_work2.value['clock_offset']
222 # setup p2p logic and join p2pool network
224 def p2p_shares(shares, peer=None):
226 print 'Processing %i shares...' % (len(shares),)
230 if share.hash in tracker.shares:
231 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
236 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
240 if shares and peer is not None:
241 peer_heads.setdefault(shares[0].hash, set()).add(peer)
247 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
249 @tracker.verified.added.watch
251 if share.pow_hash <= share.header['target']:
252 if factory.conn.value is not None:
253 factory.conn.value.send_block(block=share.as_block(tracker))
255 print 'No bitcoind connection! Erp!'
257 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash,)
260 def p2p_share_hashes(share_hashes, peer):
263 for share_hash in share_hashes:
264 if share_hash in tracker.shares:
266 last_request_time, count = requested.get(share_hash, (None, 0))
267 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
269 print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
270 get_hashes.append(share_hash)
271 requested[share_hash] = t, count + 1
273 if share_hashes and peer is not None:
274 peer_heads.setdefault(share_hashes[0], set()).add(peer)
276 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
278 def p2p_get_shares(share_hashes, parents, stops, peer):
279 parents = min(parents, 1000//len(share_hashes))
282 for share_hash in share_hashes:
283 for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
284 if share.hash in stops:
287 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
288 peer.sendShares(shares)
290 print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
294 ip, port = x.split(':')
297 return x, net.P2P_PORT
300 ('72.14.191.28', net.P2P_PORT),
301 ('62.204.197.159', net.P2P_PORT),
302 ('142.58.248.28', net.P2P_PORT),
303 ('94.23.34.145', net.P2P_PORT),
307 'dabuttonfactory.com',
310 nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
312 log.err(None, 'Error resolving bootstrap node IP:')
314 if net.NAME == 'litecoin':
315 nodes.add(((yield reactor.resolve('liteco.in')), net.P2P_PORT))
319 addrs = dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt')))
321 print "error reading addrs"
324 open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in addrs.iteritems())
325 task.LoopingCall(save_addrs).start(60)
328 current_work=current_work,
329 port=args.p2pool_port,
332 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
334 p2p_node.handle_shares = p2p_shares
335 p2p_node.handle_share_hashes = p2p_share_hashes
336 p2p_node.handle_get_shares = p2p_get_shares
340 # send share when the chain changes to their chain
341 def work_changed(new_work):
342 #print 'Work changed:', new_work
344 for share in tracker.get_chain(new_work['best_share_hash'], tracker.get_height(new_work['best_share_hash'])):
345 if share.hash in shared_share_hashes:
347 shared_share_hashes.add(share.hash)
350 for peer in p2p_node.peers.itervalues():
351 peer.sendShares([share for share in shares if share.peer is not peer])
353 current_work.changed.watch(work_changed)
356 for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
358 if share.hash in tracker.verified.shares:
359 ss.add_verified_hash(share.hash)
360 task.LoopingCall(save_shares).start(60)
365 @defer.inlineCallbacks
369 is_lan, lan_ip = yield ipdiscover.get_local_ip()
371 pm = yield portmapper.get_port_mapper()
372 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
373 except defer.TimeoutError:
377 log.err(None, "UPnP error:")
378 yield deferral.sleep(random.expovariate(1/120))
383 # start listening for workers with a JSON-RPC server
385 print 'Listening for workers on port %i...' % (args.worker_port,)
389 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
390 run_identifier = struct.pack('<I', random.randrange(2**32))
392 share_counter = skiplists.CountsSkipList(tracker, run_identifier)
393 removed_unstales = set()
394 def get_share_counts(doa=False):
395 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
396 matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
397 shares_in_chain = my_shares & matching_in_chain
398 stale_shares = my_shares - matching_in_chain
400 stale_doa_shares = stale_shares & doa_shares
401 stale_not_doa_shares = stale_shares - stale_doa_shares
402 return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
403 return len(shares_in_chain) + len(stale_shares), len(stale_shares)
404 @tracker.verified.removed.watch
406 if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
407 removed_unstales.add(share.hash)
410 def get_payout_script_from_username(request):
411 user = worker_interface.get_username(request)
415 return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
419 def compute(request):
420 state = current_work.value
421 user = worker_interface.get_username(request)
423 payout_script = get_payout_script_from_username(request)
424 if payout_script is None or random.uniform(0, 100) < args.worker_fee:
425 payout_script = my_script
427 if len(p2p_node.peers) == 0 and net.PERSIST:
428 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
429 if state['best_share_hash'] is None and net.PERSIST:
430 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
431 if time.time() > current_work2.value['last_update'] + 60:
432 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
434 previous_share = None if state['best_share_hash'] is None else tracker.shares[state['best_share_hash']]
435 subsidy = current_work2.value['subsidy']
436 share_info, generate_tx = p2pool_data.generate_transaction(
439 previous_share_hash=state['best_share_hash'],
440 coinbase='' if state['aux_work'] is None else '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0),
441 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
442 new_script=payout_script,
444 donation=math.perfect_round(65535*args.donation_percentage/100),
445 stale_frac=(lambda shares, stales:
446 255 if shares == 0 else math.perfect_round(254*stales/shares)
447 )(*get_share_counts()),
449 block_target=state['target'],
450 desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
454 print 'New work for worker %s! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
456 bitcoin_data.target_to_difficulty(share_info['target']),
457 (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - subsidy//200)*1e-8, net.BITCOIN_SYMBOL,
458 subsidy*1e-8, net.BITCOIN_SYMBOL,
459 len(current_work2.value['transactions']),
462 transactions = [generate_tx] + list(current_work2.value['transactions'])
463 merkle_root = bitcoin_data.merkle_hash(transactions)
464 merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time()
466 return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, current_work2.value['time'], state['target'], share_info['target']), state['best_share_hash']
471 def got_response(header, request):
473 user = worker_interface.get_username(request)
474 # match up with transactions
475 xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
477 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
479 share_info, transactions, getwork_time = xxx
481 hash_ = bitcoin_data.block_header_type.hash256(header)
483 pow_hash = net.BITCOIN_POW_FUNC(header)
485 if pow_hash <= header['target'] or p2pool.DEBUG:
486 if factory.conn.value is not None:
487 factory.conn.value.send_block(block=dict(header=header, txs=transactions))
489 print 'No bitcoind connection! Erp!'
490 if pow_hash <= header['target']:
492 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
495 if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
501 merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
506 parent_block_header=header,
509 a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
511 merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
513 print "MERGED RESULT:", res
514 merged.rpc_getauxblock(a, b).addBoth(_)
516 log.err(None, 'Error while processing merged mining POW:')
518 target = share_info['target']
519 if pow_hash > target:
520 print 'Worker submitted share with hash > target:\nhash : %x\ntarget: %x' % (pow_hash, target)
522 share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
523 my_shares.add(share.hash)
524 if share.previous_hash != current_work.value['best_share_hash']:
525 doa_shares.add(share.hash)
526 print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - getwork_time) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
527 good = share.previous_hash == current_work.value['best_share_hash']
528 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
530 # eg. good = share.hash == current_work.value['best_share_hash'] here
533 log.err(None, 'Error processing data received from worker:')
536 web_root = worker_interface.WorkerInterface(compute, got_response, current_work.changed)
539 if current_work.value['best_share_hash'] is not None:
540 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
541 att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
542 fracs = [share.stale_frac for share in tracker.get_chain(current_work.value['best_share_hash'], min(120, height)) if share.stale_frac is not None]
543 return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
544 return json.dumps(None)
547 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
548 weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
550 for script in sorted(weights, key=lambda s: weights[s]):
551 res[bitcoin_data.script2_to_human(script, net)] = weights[script]/total_weight
552 return json.dumps(res)
554 class WebInterface(resource.Resource):
555 def __init__(self, func, mime_type):
556 self.func, self.mime_type = func, mime_type
558 def render_GET(self, request):
559 request.setHeader('Content-Type', self.mime_type)
562 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
563 web_root.putChild('users', WebInterface(get_users, 'application/json'))
564 web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
566 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
568 reactor.listenTCP(args.worker_port, server.Site(web_root))
575 # do new getwork when a block is heard on the p2p interface
577 def new_block(block_hash):
578 work_updated.happened()
579 factory.new_block.watch(new_block)
581 print 'Started successfully!'
584 @defer.inlineCallbacks
587 flag = work_updated.get_deferred()
589 yield set_real_work1()
592 yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
594 @defer.inlineCallbacks
601 yield deferral.sleep(random.expovariate(1/20))
607 if hasattr(signal, 'SIGALRM'):
608 def watchdog_handler(signum, frame):
609 print 'Watchdog timer went off at:'
610 traceback.print_stack()
612 signal.signal(signal.SIGALRM, watchdog_handler)
613 task.LoopingCall(signal.alarm, 30).start(1)
618 yield deferral.sleep(3)
620 if time.time() > current_work2.value['last_update'] + 60:
621 print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
622 if current_work.value['best_share_hash'] is not None:
623 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
625 att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
626 weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
627 shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
628 stale_shares = stale_doa_shares + stale_not_doa_shares
629 fracs = [share.stale_frac for share in tracker.get_chain(current_work.value['best_share_hash'], min(120, height)) if share.stale_frac is not None]
630 this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
631 math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
633 len(tracker.verified.shares),
635 weights.get(my_script, 0)/total_weight*100,
636 math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
638 stale_not_doa_shares,
641 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
643 med = math.median(fracs)
644 this_str += '\nPool stales: %i%%' % (int(100*med+.5),)
647 this_str += u' Own: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius(math.binomial_conf_interval(stale_shares, shares, conf)))
649 this_str += u' Own efficiency: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius((1 - y)/(1 - med) for y in math.binomial_conf_interval(stale_shares, shares, conf)[::-1]))
650 if this_str != last_str or time.time() > last_time + 15:
653 last_time = time.time()
659 log.err(None, 'Fatal error:')
664 class FixedArgumentParser(argparse.ArgumentParser):
665 def _read_args_from_files(self, arg_strings):
666 # expand arguments referencing files
668 for arg_string in arg_strings:
670 # for regular arguments, just add them back into the list
671 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
672 new_arg_strings.append(arg_string)
674 # replace arguments referencing files with the file content
677 args_file = open(arg_string[1:])
680 for arg_line in args_file.read().splitlines():
681 for arg in self.convert_arg_line_to_args(arg_line):
682 arg_strings.append(arg)
683 arg_strings = self._read_args_from_files(arg_strings)
684 new_arg_strings.extend(arg_strings)
688 err = sys.exc_info()[1]
691 # return the modified argument list
692 return new_arg_strings
694 def convert_arg_line_to_args(self, arg_line):
695 return [arg for arg in arg_line.split() if arg.strip()]
# Build the command-line interface. fromfile_prefix_chars='@' lets users put
# arguments in a file and reference it as @filename (expanded by
# FixedArgumentParser._read_args_from_files).
parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
parser.add_argument('--version', action='version', version=p2pool.__version__)
parser.add_argument('--net',
    help='use specified network (default: bitcoin)',
    action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
parser.add_argument('--testnet',
    help='''use the network's testnet''',
    action='store_const', const=True, default=False, dest='testnet')
parser.add_argument('--debug',
    help='enable debugging mode',
    action='store_const', const=True, default=False, dest='debug')
parser.add_argument('-a', '--address',
    help='generate payouts to this address (default: <address requested from bitcoind>)',
    type=str, action='store', default=None, dest='address')
parser.add_argument('--logfile',
    help='''log to this file (default: data/<NET>/log)''',
    type=str, action='store', default=None, dest='logfile')
parser.add_argument('--merged-url',
    help='call getauxblock on this url to get work for merged mining (example: http://127.0.0.1:10332/)',
    type=str, action='store', default=None, dest='merged_url')
parser.add_argument('--merged-userpass',
    help='use this user and password when requesting merged mining work (example: ncuser:ncpass)',
    type=str, action='store', default=None, dest='merged_userpass')
parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
    help='donate this percentage of work to author of p2pool (default: 0.5)',
    type=float, action='store', default=0.5, dest='donation_percentage')

# Options controlling the p2pool P2P network interface (peer connections).
p2pool_group = parser.add_argument_group('p2pool interface')
p2pool_group.add_argument('--p2pool-port', metavar='PORT',
    help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='p2pool_port')
p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
    help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
    type=str, action='append', default=[], dest='p2pool_nodes')
parser.add_argument('--disable-upnp',
    help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
    action='store_false', default=True, dest='upnp')

# Options for the JSON-RPC interface that miners ("workers") connect to.
worker_group = parser.add_argument_group('worker interface')
worker_group.add_argument('-w', '--worker-port', metavar='PORT',
    help='listen on PORT for RPC connections from miners (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='worker_port')
worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
    help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
    type=float, action='store', default=0, dest='worker_fee')

# Options for reaching the upstream bitcoind (JSON-RPC and P2P).
bitcoind_group = parser.add_argument_group('bitcoind interface')
bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
    help='connect to this address (default: 127.0.0.1)',
    type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
    help='''connect to JSON-RPC interface at this port (default: %s)''' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_RPC_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='bitcoind_rpc_port')
bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
    help='''connect to P2P interface at this port (default: %s)''' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_P2P_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='bitcoind_p2p_port')

# Positional RPC credentials: username is optional (nargs='?'), password is required.
bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
    help='bitcoind RPC interface username (default: <empty>)',
    type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
    help='bitcoind RPC interface password',
    type=str, action='store', dest='bitcoind_rpc_password')

args = parser.parse_args()
766 net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
768 datadir_path = os.path.join(os.path.dirname(sys.argv[0]), 'data', net.NAME)
769 if not os.path.exists(datadir_path):
770 os.makedirs(datadir_path)
772 if args.logfile is None:
773 args.logfile = os.path.join(datadir_path, 'log')
775 class EncodeReplacerPipe(object):
776 def __init__(self, inner_file):
777 self.inner_file = inner_file
779 def write(self, data):
780 if isinstance(data, unicode):
781 data = data.encode(self.inner_file.encoding, 'replace')
782 self.inner_file.write(data)
784 self.inner_file.flush()
785 class LogFile(object):
786 def __init__(self, filename):
787 self.filename = filename
788 self.inner_file = None
791 if self.inner_file is not None:
792 self.inner_file.close()
793 open(self.filename, 'a').close()
794 f = open(self.filename, 'rb')
795 f.seek(0, os.SEEK_END)
797 if length > 100*1000*1000:
798 f.seek(-1000*1000, os.SEEK_END)
800 if f.read(1) in ('', '\n'):
804 f = open(self.filename, 'wb')
807 self.inner_file = codecs.open(self.filename, 'a', 'utf-8')
808 def write(self, data):
809 self.inner_file.write(data)
811 self.inner_file.flush()
812 class TeePipe(object):
813 def __init__(self, outputs):
814 self.outputs = outputs
815 def write(self, data):
816 for output in self.outputs:
819 for output in self.outputs:
821 class TimestampingPipe(object):
822 def __init__(self, inner_file):
823 self.inner_file = inner_file
826 def write(self, data):
827 buf = self.buf + data
828 lines = buf.split('\n')
829 for line in lines[:-1]:
830 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
831 self.inner_file.flush()
835 class AbortPipe(object):
836 def __init__(self, inner_file):
837 self.inner_file = inner_file
839 def write(self, data):
841 self.inner_file.write(data)
843 sys.stdout = sys.__stdout__
844 log.DefaultObserver.stderr = sys.stderr = sys.__stderr__
847 self.inner_file.flush()
848 logfile = LogFile(args.logfile)
849 sys.stdout = sys.stderr = log.DefaultObserver.stderr = AbortPipe(TimestampingPipe(TeePipe([EncodeReplacerPipe(sys.stderr), logfile])))
850 if hasattr(signal, "SIGUSR1"):
851 def sigusr1(signum, frame):
852 print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
854 print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
855 signal.signal(signal.SIGUSR1, sigusr1)
856 task.LoopingCall(logfile.reopen).start(5)
858 if args.bitcoind_rpc_port is None:
859 args.bitcoind_rpc_port = net.BITCOIN_RPC_PORT
861 if args.bitcoind_p2p_port is None:
862 args.bitcoind_p2p_port = net.BITCOIN_P2P_PORT
864 if args.p2pool_port is None:
865 args.p2pool_port = net.P2P_PORT
867 if args.worker_port is None:
868 args.worker_port = net.WORKER_PORT
870 if args.address is not None:
872 args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net)
874 parser.error('error parsing address: ' + repr(e))
876 args.pubkey_hash = None
878 if (args.merged_url is None) ^ (args.merged_userpass is None):
879 parser.error('must specify --merged-url and --merged-userpass')
881 reactor.callWhenRunning(main, args, net, datadir_path)