4 from __future__ import division
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
24 from bitcoin import worker_interface
25 from util import expiring_dict, jsonrpc, variable, deferral, math
26 from . import p2p, skiplists, networks
27 import p2pool, p2pool.data as p2pool_data
# Fetch a block template from bitcoind's getmemorypool RPC and normalize it
# into a plain dict (hash as int, txs unpacked, target as FloatingInteger).
# deferral.retry re-invokes the call up to 3 times on failure.
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
work = yield bitcoind.rpc_getmemorypool()
defer.returnValue(dict(
version=work['version'],
# previousblockhash arrives as a hex string; convert to int
previous_block_hash=int(work['previousblockhash'], 16),
transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
subsidy=work['coinbasevalue'],
# 'bits' may be a hex string (byte-reversed on the wire) or already an
# integer, depending on the bitcoind version — handle both
target=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
42 @deferral.retry('Error creating payout script:', 10)
43 @defer.inlineCallbacks
44 def get_payout_script2(bitcoind, net):
45 address = yield bitcoind.rpc_getaccountaddress('p2pool')
46 validate_response = yield bitcoind.rpc_validateaddress(address)
47 if 'pubkey' not in validate_response:
48 print ' Pubkey request failed. Falling back to payout to address.'
49 defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
50 pubkey = validate_response['pubkey'].decode('hex')
51 defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))
# Startup sequence (body of main; surrounding def elided in this view):
# check the bitcoind RPC and P2P connections, determine the payout script,
# load previously-saved shares, and set up the work-state Variables.
@defer.inlineCallbacks
print 'p2pool (version %s)' % (p2pool.__version__,)
print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
# connect to bitcoind over JSON-RPC and do initial getmemorypool
url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
# net.BITCOIN_RPC_CHECK verifies we're talking to the right coin daemon
good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.BITCOIN_RPC_CHECK)(bitcoind)
print " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
temp_work = yield getwork(bitcoind)
print ' Current block hash: %x' % (temp_work['previous_block_hash'],)
# connect to bitcoind over bitcoin-p2p
print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
factory = bitcoin_p2p.ClientFactory(net)
reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
yield factory.getProtocol() # waits until handshake is successful
# determine the payout script: either derive it from bitcoind's wallet or
# from the user-supplied --address (args.pubkey_hash)
if args.pubkey_hash is None:
print 'Getting payout address from bitcoind...'
my_script = yield get_payout_script2(bitcoind, net)
print 'Computing payout script from provided address....'
my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
print ' Payout script:', bitcoin_data.script2_to_human(my_script, net)
# ht tracks block heights from bitcoind; tracker holds the share chain
ht = bitcoin_p2p.HeightTracker(bitcoind, factory)
tracker = p2pool_data.OkayTracker(net)
shared_share_hashes = set()
# on-disk share store lives next to the script, named <net>_shares.*
ss = p2pool_data.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_shares.'), net)
known_verified = set()
print "Loading shares..."
# replay the share store: 'share' records go into the tracker,
# 'verified_hash' records are collected and applied afterwards
for i, (mode, contents) in enumerate(ss.get_shares()):
if contents.hash in tracker.shares:
shared_share_hashes.add(contents.hash)
contents.time_seen = 0
tracker.add(contents)
if len(tracker.shares) % 1000 == 0 and tracker.shares:
print " %i" % (len(tracker.shares),)
elif mode == 'verified_hash':
known_verified.add(contents)
# unknown record mode — the store is corrupt
raise AssertionError()
print " ...inserting %i verified shares..." % (len(known_verified),)
for h in known_verified:
if h not in tracker.shares:
# verified hash without a matching share: drop it from the store
ss.forget_verified_share(h)
tracker.verified.add(tracker.shares[h])
print " ...done loading %i shares!" % (len(tracker.shares),)
# keep the on-disk store and shared-set in sync with tracker evictions
tracker.removed.watch(lambda share: ss.forget_share(share.hash))
tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
# pre_* Variables hold raw state; current_* are the published versions
pre_current_work = variable.Variable(None)
pre_current_work2 = variable.Variable(None)
pre_merged_work = variable.Variable(None)
# information affecting work that should trigger a long-polling update
current_work = variable.Variable(None)
# information affecting work that should not trigger a long-polling update
current_work2 = variable.Variable(None)
work_updated = variable.Event()
# share_hash -> (last_request_time, request_count), entries expire after 300s
requested = expiring_dict.ExpiringDict(300)
# Poll bitcoind for fresh work and push it into the pre_current_work
# Variables (work2 holds the non-long-poll data, work the long-poll data).
@defer.inlineCallbacks
def set_real_work1():
work = yield getwork(bitcoind)
pre_current_work2.set(dict(
transactions=work['transactions'],
subsidy=work['subsidy'],
# difference between local clock and bitcoind's block-template time
clock_offset=time.time() - work['time'],
last_update=time.time(),
)) # second set first because everything hooks on the first
pre_current_work.set(dict(
version=work['version'],
previous_block=work['previous_block_hash'],
target=work['target'],
# Recompute the best share and publish current_work/current_work2, then
# request any desired parent shares from peers believed to have them.
def set_real_work2():
best, desired = tracker.think(ht, pre_current_work.value['previous_block'], time.time() - pre_current_work2.value['clock_offset'])
current_work2.set(pre_current_work2.value)
t = dict(pre_current_work.value)
t['best_share_hash'] = best
t['aux_work'] = pre_merged_work.value
# NOTE(review): 't' is later compared against timestamps — an elided line
# presumably rebinds t = time.time() before this loop; confirm in full source
for peer2, share_hash in desired:
if share_hash not in tracker.tails: # was received in the time tracker.think was running
last_request_time, count = requested.get(share_hash, (None, 0))
# back off exponentially: skip if asked within the last 10*1.5**count s
if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
potential_peers = set()
for head in tracker.tails[share_hash]:
potential_peers.update(peer_heads.get(head, set()))
potential_peers = [peer for peer in potential_peers if peer.connected2]
if count == 0 and peer2 is not None and peer2.connected2:
# usually pick a random knowing peer, sometimes fall back to peer2
peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
# stop the download at known heads (or just short of them)
stops=list(set(tracker.heads) | set(
tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
requested[share_hash] = t, count + 1
# Recompute published work whenever any input changes, then do the first poll.
pre_current_work.changed.watch(lambda _: set_real_work2())
print 'Initializing work...'
yield set_real_work1()
pre_merged_work.changed.watch(lambda _: set_real_work2())
ht.updated.watch(set_real_work2)
# Poll the merged-mining daemon (if configured) for aux work about once a
# second and publish it via pre_merged_work.
@defer.inlineCallbacks
def set_merged_work():
# no --merged-url configured: nothing to poll
if not args.merged_url:
merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
pre_merged_work.set(dict(
hash=int(auxblock['hash'], 16),
target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
chain_id=auxblock['chainid'],
yield deferral.sleep(1)
# start time in bitcoind's clock frame (corrected by clock_offset)
start_time = time.time() - current_work2.value['clock_offset']
# setup p2p logic and join p2pool network
# Handle a batch of shares received from the p2p network (peer=None means
# they originated locally): insert new ones into the tracker and record
# which peer knows about the head.
def p2p_shares(shares, peer=None):
print 'Processing %i shares...' % (len(shares),)
# duplicate — already in the tracker, skip
if share.hash in tracker.shares:
#print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
#print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
if shares and peer is not None:
# remember that this peer knows of the first share's hash
peer_heads.setdefault(shares[0].hash, set()).add(peer)
print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
# Fired when a share is verified: if it also meets the bitcoin block
# target, submit it to bitcoind as a full block.
@tracker.verified.added.watch
if share.pow_hash <= share.header['target']:
if factory.conn.value is not None:
factory.conn.value.send_block(block=share.as_block(tracker, net))
print 'No bitcoind connection! Erp!'
print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash,)
# Handle advertised share hashes from a peer: request the ones we don't
# have yet (subject to the per-hash request backoff in `requested`).
def p2p_share_hashes(share_hashes, peer):
for share_hash in share_hashes:
# already known — nothing to fetch
if share_hash in tracker.shares:
last_request_time, count = requested.get(share_hash, (None, 0))
# exponential backoff: don't re-request too soon
if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
get_hashes.append(share_hash)
requested[share_hash] = t, count + 1
if share_hashes and peer is not None:
# remember the peer as a holder of the advertised head
peer_heads.setdefault(share_hashes[0], set()).add(peer)
peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
# Serve a peer's getshares request: walk each requested chain up to
# `parents` ancestors (capped so at most ~1000 shares total are sent),
# stopping early at any hash in `stops`.
def p2p_get_shares(share_hashes, parents, stops, peer):
parents = min(parents, 1000//len(share_hashes))
for share_hash in share_hashes:
for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
if share.hash in stops:
print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
peer.sendShares(shares)
# Join the p2pool network: parse -n nodes, resolve bootstrap hosts, load
# the persisted address book, start the p2p node and hook up its handlers.
print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
# parse "ADDR:PORT" (or bare ADDR, defaulting to the net's P2P port)
ip, port = x.split(':')
return x, net.P2P_PORT
# built-in bootstrap node IPs
('72.14.191.28', net.P2P_PORT),
('62.204.197.159', net.P2P_PORT),
('142.58.248.28', net.P2P_PORT),
('94.23.34.145', net.P2P_PORT),
'dabuttonfactory.com',
nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
# resolution failure is non-fatal; just log it
log.err(None, 'Error resolving bootstrap node IP:')
if net.NAME == 'litecoin':
nodes.add(((yield reactor.resolve('liteco.in')), net.P2P_PORT))
# NOTE(review): eval() on the local addrs file — trusted as node-local
# state, but any tampering with that file executes as code
addrs = dict(eval(x) for x in open(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_addrs.txt')))
print "error reading addrs"
# NOTE(review): file handle not explicitly closed — relies on CPython
# refcounting; a with-block would be safer
open(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_addrs.txt'), 'w').writelines(repr(x) + '\n' for x in addrs.iteritems())
task.LoopingCall(save_addrs).start(60)
current_work=current_work,
port=args.p2pool_port,
preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
# wire the network callbacks defined above into the p2p node
p2p_node.handle_shares = p2p_shares
p2p_node.handle_share_hashes = p2p_share_hashes
p2p_node.handle_get_shares = p2p_get_shares
# send share when the chain changes to their chain
# Broadcast shares that peers haven't seen whenever the best chain changes.
def work_changed(new_work):
#print 'Work changed:', new_work
for share in tracker.get_chain_known(new_work['best_share_hash']):
# stop at the first share every peer already has
if share.hash in shared_share_hashes:
shared_share_hashes.add(share.hash)
for peer in p2p_node.peers.itervalues():
# don't echo a share back to the peer it came from
peer.sendShares([share for share in shares if share.peer is not peer])
current_work.changed.watch(work_changed)
# Periodically persist the recent chain (and verified flags) to disk.
for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH):
if share.hash in tracker.verified.shares:
ss.add_verified_hash(share.hash)
task.LoopingCall(save_shares).start(60)
# Background loop: try to forward the p2pool port through the router via
# UPnP, retrying after a randomized (mean ~120s) delay. Errors are logged,
# never fatal.
@defer.inlineCallbacks
is_lan, lan_ip = yield ipdiscover.get_local_ip()
pm = yield portmapper.get_port_mapper()
yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
# UPnP discovery timing out is expected on networks without it
except defer.TimeoutError:
log.err(None, "UPnP error:")
yield deferral.sleep(random.expovariate(1/120))
# start listening for workers with a JSON-RPC server
print 'Listening for workers on port %i...' % (args.worker_port,)
# merkle_root -> (share_info, transactions, time issued); expires after 300s
merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
# random per-run tag embedded in coinbase nonces to identify our own shares
run_identifier = struct.pack('<I', random.randrange(2**32))
share_counter = skiplists.CountsSkipList(tracker, run_identifier)
removed_unstales = set()
# Count our shares in/off the current best chain. Returns
# (total, stale) or with doa=True (total, stale_doa, stale_not_doa).
def get_share_counts(doa=False):
height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
shares_in_chain = my_shares & matching_in_chain
stale_shares = my_shares - matching_in_chain
stale_doa_shares = stale_shares & doa_shares
stale_not_doa_shares = stale_shares - stale_doa_shares
return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
return len(shares_in_chain) + len(stale_shares), len(stale_shares)
# When one of our verified shares scrolls off, remember it so the counts
# above still treat it as "was in chain".
@tracker.verified.removed.watch
if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
removed_unstales.add(share.hash)
# Map a miner's username (expected to be a coin address) to a payout
# script2; elided lines presumably return None on parse failure — confirm.
def get_payout_script_from_username(request):
user = worker_interface.get_username(request)
return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
# Build a getwork job for a miner: choose the payout script (worker's own
# address, or ours when fee sampling hits), generate the share/coinbase
# transaction, remember the tx set by merkle root, and return a BlockAttempt.
def compute(request):
state = current_work.value
payout_script = get_payout_script_from_username(request)
# with probability worker_fee% the pool operator's script is used instead
if payout_script is None or random.uniform(0, 100) < args.worker_fee:
payout_script = my_script
# refuse to hand out work when we clearly can't use the result
if len(p2p_node.peers) == 0 and net.PERSIST:
raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
if state['best_share_hash'] is None and net.PERSIST:
raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
if time.time() > current_work2.value['last_update'] + 60:
raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
previous_share = None if state['best_share_hash'] is None else tracker.shares[state['best_share_hash']]
subsidy = current_work2.value['subsidy']
share_info, generate_tx = p2pool_data.generate_transaction(
previous_share_hash=state['best_share_hash'],
# merged-mining commitment: magic '\xfa\xbemm' + reversed aux hash + (1, 0)
coinbase='' if state['aux_work'] is None else '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0),
# run_identifier tags the nonce so we can recognize our own shares
nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
new_script=payout_script,
# donation fraction encoded in 1/65535ths
donation=math.perfect_round(65535*args.donation_percentage/100),
# stale fraction encoded in one byte; 255 is the "no data" sentinel
stale_frac=(lambda shares, stales:
255 if shares == 0 else math.perfect_round(254*stales/shares)
)(*get_share_counts()),
block_target=state['target'],
desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
bitcoin_data.target_to_difficulty(share_info['target']),
# payout minus the 0.5% author donation (subsidy//200), in coins
(sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - subsidy//200)*1e-8, net.BITCOIN_SYMBOL,
subsidy*1e-8, net.BITCOIN_SYMBOL,
len(current_work2.value['transactions']),
transactions = [generate_tx] + list(current_work2.value['transactions'])
merkle_root = bitcoin_data.merkle_hash(transactions)
# stash the tx set so got_response can reassemble the block later
merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time()
return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, current_work2.value['time'], state['target'], share_info['target']), state['best_share_hash']
# Handle a solved header submitted by a miner: look up the tx set by merkle
# root, submit to bitcoind if it meets the block target, forward merged-
# mining proof if applicable, and record the share if it meets share target.
def got_response(header, request):
user = worker_interface.get_username(request)
# match up with transactions
xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
share_info, transactions, getwork_time = xxx
hash_ = bitcoin_data.block_header_type.hash256(header)
pow_hash = net.BITCOIN_POW_FUNC(header)
# in DEBUG mode blocks are submitted unconditionally for testing
if pow_hash <= header['target'] or p2pool.DEBUG:
if factory.conn.value is not None:
factory.conn.value.send_block(block=dict(header=header, txs=transactions))
print 'No bitcoind connection! Erp!'
if pow_hash <= header['target']:
print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
# merged mining: if the PoW also satisfies the aux chain's target,
# build the aux proof-of-work and submit it to the merged daemon
if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
parent_block_header=header,
# a = aux hash extracted from the coinbase, b = packed aux_pow, both hex
a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
print "MERGED RESULT:", res
merged.rpc_getauxblock(a, b).addBoth(_)
log.err(None, 'Error while processing merged mining POW:')
target = share_info['target']
# below share difficulty: reject (don't record as a share)
if pow_hash > target:
print 'Worker submitted share with hash > target:\nhash : %x\ntarget: %x' % (pow_hash, target)
share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
my_shares.add(share.hash)
# share built on a stale best hash counts as dead-on-arrival
if share.previous_hash != current_work.value['best_share_hash']:
doa_shares.add(share.hash)
print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - getwork_time) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
good = share.previous_hash == current_work.value['best_share_hash']
# maybe revert back to tracker being non-blocking so 'good' can be more accurate?
# eg. good = share.hash == current_work.value['best_share_hash'] here
log.err(None, 'Error processing data received from worker:')
# HTTP side: the worker (getwork) interface plus small JSON status pages.
web_root = worker_interface.WorkerInterface(compute, got_response, current_work.changed)
# get_rate (def elided): pool hashrate corrected for the recent stale rate
if current_work.value['best_share_hash'] is not None:
height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
return json.dumps(None)
# get_users (def elided): payout weight fraction per human-readable script
height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
for script in sorted(weights, key=lambda s: weights[s]):
res[bitcoin_data.script2_to_human(script, net)] = weights[script]/total_weight
return json.dumps(res)
# Minimal twisted.web resource: calls func() and serves the result with the
# given MIME type (render body elided in this view).
class WebInterface(resource.Resource):
def __init__(self, func, mime_type):
self.func, self.mime_type = func, mime_type
def render_GET(self, request):
request.setHeader('Content-Type', self.mime_type)
web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
web_root.putChild('users', WebInterface(get_users, 'application/json'))
web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
reactor.listenTCP(args.worker_port, server.Site(web_root))
# do new getwork when a block is heard on the p2p interface
def new_block(block_hash):
work_updated.happened()
factory.new_block.watch(new_block)
print 'Started successfully!'
# Work-poller loop: refresh work when the event fires or after a random
# 1-10s delay, whichever comes first.
@defer.inlineCallbacks
flag = work_updated.get_deferred()
yield set_real_work1()
yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
# Second background loop (body elided), mean ~20s period.
@defer.inlineCallbacks
yield deferral.sleep(random.expovariate(1/20))
# Watchdog: SIGALRM fires if the reactor stalls; alarm is re-armed every
# second with a 30s fuse, so a stack trace prints only on a real hang.
if hasattr(signal, 'SIGALRM'):
def watchdog_handler(signum, frame):
print 'Watchdog timer went off at:'
traceback.print_stack()
signal.signal(signal.SIGALRM, watchdog_handler)
task.LoopingCall(signal.alarm, 30).start(1)
# Status loop: every ~3s build a one-line pool summary and print it when
# it changes (or every 15s).
yield deferral.sleep(3)
if time.time() > current_work2.value['last_update'] + 60:
print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
if current_work.value['best_share_hash'] is not None:
height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
stale_shares = stale_doa_shares + stale_not_doa_shares
fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
# pool hashrate corrected by the median recent stale fraction
math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
len(tracker.verified.shares),
weights.get(my_script, 0)/total_weight*100,
math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
stale_not_doa_shares,
) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
med = math.median(fracs)
this_str += '\nPool stales: %i%%' % (int(100*med+.5),)
# own stale rate with a binomial confidence interval, shown as center±radius
this_str += ' Own: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius(math.binomial_conf_interval(stale_shares, shares, conf)))
# efficiency = our live fraction relative to the pool's live fraction
this_str += ' Own efficiency: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius((1 - y)/(1 - med) for y in math.binomial_conf_interval(stale_shares, shares, conf)[::-1]))
this_str += ' (%i%% confidence)' % (int(100*conf+.5),)
if this_str != last_str or time.time() > last_time + 15:
last_time = time.time()
log.err(None, 'Fatal error:')
# ArgumentParser subclass whose @file expansion recurses: arguments read
# from a file may themselves reference further files.
class FixedArgumentParser(argparse.ArgumentParser):
def _read_args_from_files(self, arg_strings):
# expand arguments referencing files
for arg_string in arg_strings:
# for regular arguments, just add them back into the list
if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
new_arg_strings.append(arg_string)
# replace arguments referencing files with the file content
args_file = open(arg_string[1:])
for arg_line in args_file.read().splitlines():
for arg in self.convert_arg_line_to_args(arg_line):
arg_strings.append(arg)
# recurse so file contents can reference further @files
arg_strings = self._read_args_from_files(arg_strings)
new_arg_strings.extend(arg_strings)
# elided: surrounding try/except — err is the caught exception
err = sys.exc_info()[1]
# return the modified argument list
return new_arg_strings
def convert_arg_line_to_args(self, arg_line):
    """Split one line of an @args-file into individual arguments.

    Overrides argparse's default (which treats the whole line as a single
    argument) to split on whitespace. str.split() with no separator already
    collapses runs of whitespace and never yields empty strings, so the
    original's extra `if arg.strip()` filter was dead code and is removed.
    """
    return arg_line.split()
# Command-line interface definition and post-parse defaults.
parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
parser.add_argument('--version', action='version', version=p2pool.__version__)
parser.add_argument('--net',
help='use specified network (default: bitcoin)',
action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
parser.add_argument('--testnet',
help='''use the network's testnet''',
action='store_const', const=True, default=False, dest='testnet')
parser.add_argument('--debug',
help='debugging mode',
action='store_const', const=True, default=False, dest='debug')
parser.add_argument('-a', '--address',
help='generate to this address (defaults to requesting one from bitcoind)',
type=str, action='store', default=None, dest='address')
parser.add_argument('--logfile',
help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
type=str, action='store', default=None, dest='logfile')
parser.add_argument('--merged-url',
help='call getauxblock on this url to get work for merged mining',
type=str, action='store', default=None, dest='merged_url')
parser.add_argument('--merged-userpass',
help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
type=str, action='store', default=None, dest='merged_userpass')
parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
help='percentage amount to donate to author of p2pool. Default: 0.5',
type=float, action='store', default=0.5, dest='donation_percentage')
p2pool_group = parser.add_argument_group('p2pool interface')
p2pool_group.add_argument('--p2pool-port', metavar='PORT',
help='use TCP port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
type=int, action='store', default=None, dest='p2pool_port')
p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to default p2pool P2P port), in addition to builtin addresses',
type=str, action='append', default=[], dest='p2pool_nodes')
parser.add_argument('--disable-upnp',
help='''don't attempt to forward p2pool P2P port from the WAN to this computer using UPnP''',
action='store_false', default=True, dest='upnp')
worker_group = parser.add_argument_group('worker interface')
worker_group.add_argument('-w', '--worker-port', metavar='PORT',
help='listen on PORT for RPC connections from miners asking for work and providing responses (default:%s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
type=int, action='store', default=None, dest='worker_port')
worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee . default: 0''',
type=float, action='store', default=0, dest='worker_fee')
bitcoind_group = parser.add_argument_group('bitcoind interface')
bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
help='connect to a bitcoind at this address (default: 127.0.0.1)',
type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getmemorypool (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_RPC_PORT) for _, n in sorted(networks.realnets.items())),
type=int, action='store', default=None, dest='bitcoind_rpc_port')
bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_P2P_PORT) for _, n in sorted(networks.realnets.items())),
type=int, action='store', default=None, dest='bitcoind_p2p_port')
# positional RPC credentials (username optional, password required)
bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
help='bitcoind RPC interface username (default: empty)',
type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
help='bitcoind RPC interface password',
type=str, action='store', dest='bitcoind_rpc_password')
args = parser.parse_args()
# select the network object, e.g. 'bitcoin' or 'bitcoin_testnet'
net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
# default logfile lives next to the script, named <net>.log
if args.logfile is None:
args.logfile = os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '.log')
# Append-only log file wrapper that can be reopened (for rotation via
# SIGUSR1 / the periodic reopen LoopingCall) and truncates itself when it
# grows past ~100MB, keeping roughly the last 1MB (reopen body partly
# elided in this view).
class LogFile(object):
def __init__(self, filename):
self.filename = filename
# opened lazily by reopen(); None until then
self.inner_file = None
if self.inner_file is not None:
self.inner_file.close()
# ensure the file exists before reading it
open(self.filename, 'a').close()
f = open(self.filename, 'rb')
f.seek(0, os.SEEK_END)
# over ~100MB: keep only the last ~1MB
if length > 100*1000*1000:
f.seek(-1000*1000, os.SEEK_END)
# scan forward to a line boundary (or EOF) before keeping data
if f.read(1) in ('', '\n'):
f = open(self.filename, 'wb')
self.inner_file = open(self.filename, 'a')
def write(self, data):
self.inner_file.write(data)
self.inner_file.flush()
# File-like object that fans every write (and flush) out to several
# underlying outputs — used to send stdout/stderr to both the console and
# the log file (write/flush bodies partly elided in this view).
class TeePipe(object):
def __init__(self, outputs):
self.outputs = outputs
def write(self, data):
for output in self.outputs:
for output in self.outputs:
# File-like wrapper that buffers partial lines and prefixes each completed
# line with a HH:MM:SS.micro timestamp before passing it to inner_file.
class TimestampingPipe(object):
def __init__(self, inner_file):
self.inner_file = inner_file
def write(self, data):
# accumulate until newline; self.buf holds the trailing partial line
buf = self.buf + data
lines = buf.split('\n')
for line in lines[:-1]:
self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
self.inner_file.flush()
# Final wiring: route stdout/stderr/twisted-log through the timestamping
# tee, hook up log rotation, fill in per-network port defaults, resolve the
# payout address, and schedule main() on the reactor.
logfile = LogFile(args.logfile)
sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
# SIGUSR1 triggers a log reopen (for external rotation tools)
if hasattr(signal, "SIGUSR1"):
def sigusr1(signum, frame):
print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
signal.signal(signal.SIGUSR1, sigusr1)
task.LoopingCall(logfile.reopen).start(5)
# fill unset ports from the selected network's defaults
if args.bitcoind_rpc_port is None:
args.bitcoind_rpc_port = net.BITCOIN_RPC_PORT
if args.bitcoind_p2p_port is None:
args.bitcoind_p2p_port = net.BITCOIN_P2P_PORT
if args.p2pool_port is None:
args.p2pool_port = net.P2P_PORT
if args.worker_port is None:
args.worker_port = net.WORKER_PORT
# --address: parse now so bad input fails before startup (except elided)
if args.address is not None:
args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net)
parser.error('error parsing address: ' + repr(e))
args.pubkey_hash = None
# merged-mining options must be given together or not at all
if (args.merged_url is None) ^ (args.merged_userpass is None):
parser.error('must specify --merged-url and --merged-userpass')
reactor.callWhenRunning(main, args, net)