3 from __future__ import division
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind, ht, net):
    """Fetch a block template from bitcoind via getmemorypool and normalize it.

    Fires (via defer.returnValue) a dict of parsed fields: version,
    previous_block_hash (int), transactions (unpacked tx structures),
    subsidy, and target. Retries up to 3 times on failure.
    NOTE(review): this excerpt elides the lines closing the dict literal
    (and presumably a time=work['time'] entry), so the suite below is
    syntactically incomplete as shown.
    """
    work = yield bitcoind.rpc_getmemorypool()
    defer.returnValue(dict(
        version=work['version'],
        # previousblockhash arrives as a big-endian hex string; store as int
        previous_block_hash=int(work['previousblockhash'], 16),
        # each transaction is hex-encoded; unpack into structured tx objects
        transactions=[bitcoin.data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
        subsidy=work['coinbasevalue'],
        # 'bits' may be a hex string (reversed on the wire) or already an int,
        # depending on the bitcoind version
        target=bitcoin.data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin.data.FloatingInteger(work['bits']),
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over its p2p interface for a payout script via checkorder.

    Fires the script on 'success', None on 'denied' (caller falls back to an
    address-based script), and raises ValueError for any other reply.
    """
    res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
    if res['reply'] == 'success':
        defer.returnValue(res['script'])
    elif res['reply'] == 'denied':
        defer.returnValue(None)
    # NOTE(review): the 'else:' introducing this raise is elided in this excerpt.
    raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build a payout script from an address requested from bitcoind.

    Asks bitcoind for the address of its 'p2pool' account and converts it
    into a script2-form payout script for the given network. Retries up to
    10 times on failure.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
@defer.inlineCallbacks
# NOTE(review): the 'def main(args):' header (and an enclosing try:) is elided
# from this excerpt; the indented statements below are the body of main().
# Several suites below are syntactically incomplete because interleaved source
# lines are missing.
    print 'p2pool (version %s)' % (p2pool_init.__version__,)
    print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
    
    # connect to bitcoind over JSON-RPC and do initial getwork
    url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
    print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
    bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
    # sanity-check that we are talking to the right coin daemon for this network
    good = yield deferral.retry('Error while checking bitcoind identity:', 1)(args.net.BITCOIN_RPC_CHECK)(bitcoind)
    # (the 'if not good:' guard for this message is elided in this excerpt)
    print " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
    temp_work = yield deferral.retry('Error while testing getwork:', 1)(defer.inlineCallbacks(lambda: defer.returnValue(bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork())))))()
    print ' Current block hash: %x' % (temp_work.previous_block,)
    
    # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
    print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
    factory = bitcoin.p2p.ClientFactory(args.net)
    reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
    # None here means the IP-transaction (checkorder) path was denied
    my_script = yield get_payout_script(factory)
    if args.pubkey_hash is None:
        # (an inner 'if my_script is None:' guard is elided in this excerpt)
        print ' IP transaction denied ... falling back to sending to address.'
        my_script = yield get_payout_script2(bitcoind, args.net)
    # (the 'else:' branch marker is elided in this excerpt)
        my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
    print ' Payout script:', bitcoin.data.script2_to_human(my_script, args.net)
    
    # persist block headers across restarts so they don't have to be re-downloaded
    print 'Loading cached block headers...'
    ht = bitcoin.p2p.HeightTracker(factory, args.net.NAME + '_headers.dat')
    print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
    
    tracker = p2pool.OkayTracker(args.net)
    ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.NAME + '_shares.'), args.net)
    known_verified = set()
    print "Loading shares..."
    # replay the on-disk share store into the in-memory tracker
    for i, (mode, contents) in enumerate(ss.get_shares()):
        # (the "if mode == 'share':" branch header is elided in this excerpt)
            if contents.hash in tracker.shares:
                # (a 'continue' on the elided next line skips duplicates)
            contents.shared = True
            contents.time_seen = 0
            tracker.add(contents)
            # progress indicator for large share stores
            if len(tracker.shares) % 1000 == 0 and tracker.shares:
                print " %i" % (len(tracker.shares),)
        elif mode == 'verified_hash':
            known_verified.add(contents)
        # (the 'else:' branch marker is elided in this excerpt)
            raise AssertionError()
    print " ...inserting %i verified shares..." % (len(known_verified),)
    for h in known_verified:
        if h not in tracker.shares:
            # verified hash without a matching share is stale bookkeeping; drop it
            ss.forget_verified_share(h)
        # (the 'else:' branch marker is elided in this excerpt)
            tracker.verified.add(tracker.shares[h])
    print " ...done loading %i shares!" % (len(tracker.shares),)
    
    # keep the on-disk store in sync with the in-memory tracker from here on
    tracker.added.watch(lambda share: ss.add_share(share))
    tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
    tracker.removed.watch(lambda share: ss.forget_share(share.hash))
    tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
    
    peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
    
    # information affecting work that should trigger a long-polling update
    current_work = variable.Variable(None)
    # information affecting work that should not trigger a long-polling update
    current_work2 = variable.Variable(None)
    
    work_updated = variable.Event()
    
    # share_hash -> (last_request_time, request_count), expires after 5 minutes
    requested = expiring_dict.ExpiringDict(300)
    @defer.inlineCallbacks
    def set_real_work1():
        """Refresh current_work/current_work2 from bitcoind's block template.

        NOTE(review): this excerpt elides the lines closing both dict literals
        and the tail of the function (including, presumably, a time=work['time']
        entry and the 'changed' handling), so the suites below are incomplete
        as shown.
        """
        work = yield getwork(bitcoind, ht, args.net)
        # did the best block change since the last poll?
        changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
        # fields that SHOULD trigger a long-poll push to miners
        current_work.set(dict(
            version=work['version'],
            previous_block=work['previous_block_hash'],
            target=work['target'],
            # carry forward share-chain state; it is managed by set_real_work2
            best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
        # fields that should NOT trigger a long-poll push
        current_work2.set(dict(
            transactions=work['transactions'],
            subsidy=work['subsidy'],
            # local-clock skew relative to bitcoind's template timestamp
            clock_offset=time.time() - work['time'],
            last_update=time.time(),
    def set_real_work2():
        """Recompute the best share-chain head and request missing parent shares.

        NOTE(review): interleaved lines are elided in this excerpt (e.g. the
        't = time.time()' assignment, 'continue' statements, and the
        send_getshares call), so some suites below are incomplete as shown.
        """
        best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
        # publish the new best share hash without touching the bitcoind fields
        t = dict(current_work.value)
        t['best_share_hash'] = best
        # try to fetch any desired-but-missing ancestor shares from peers
        for peer2, share_hash in desired:
            if share_hash not in tracker.tails: # was received in the time tracker.think was running
            # NOTE(review): 't' below is a timestamp set on an elided line
            # (t = time.time()), shadowing the work dict above — confirm.
            last_request_time, count = requested.get(share_hash, (None, 0))
            # back off exponentially on repeated requests for the same hash
            if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
            # collect peers known to have a head descending from this share
            potential_peers = set()
            for head in tracker.tails[share_hash]:
                potential_peers.update(peer_heads.get(head, set()))
            potential_peers = [peer for peer in potential_peers if peer.connected2]
            if count == 0 and peer2 is not None and peer2.connected2:
                # first attempt: prefer asking the peer that announced it
            peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
            print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
            # stop the peer's response at our known heads (or just below them)
                stops=list(set(tracker.heads) | set(
                    tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
            requested[share_hash] = t, count + 1
    print 'Initializing work...'
    yield set_real_work1()
    
    @defer.inlineCallbacks
    def set_merged_work():
        """Poll the merged-mining daemon for aux work and publish it.

        NOTE(review): the enclosing 'while True:' loop, the early return, and
        the current_work.set(x) call are elided in this excerpt.
        """
        # merged mining is optional; do nothing unless configured
        if not args.merged_url:
        merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
        auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
        x = dict(current_work.value)
        x['aux_work'] = dict(
            hash=int(auxblock['hash'], 16),
            target=bitcoin.data.HashType().unpack(auxblock['target'].decode('hex')),
            chain_id=auxblock['chainid'],
        yield deferral.sleep(1)
    
    # node start time, expressed in bitcoind's clock
    start_time = time.time() - current_work2.value['clock_offset']
    
    # setup p2p logic and join p2pool network
    def share_share(share, ignore_peer=None):
        """Broadcast a share to all connected peers, optionally skipping one.

        ignore_peer is typically the peer the share came from, to avoid
        echoing it straight back. NOTE(review): the 'continue' under the
        guard is elided in this excerpt.
        """
        for peer in p2p_node.peers.itervalues():
            if peer is ignore_peer:
            #if p2pool_init.DEBUG:
            #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
            peer.sendShares([share])
    def p2p_shares(shares, peer=None):
        """Handle a batch of shares received from the p2p network.

        Adds new shares to the tracker, records which peer knows of the head,
        and rebroadcasts. NOTE(review): the per-share 'for' loop, duplicate
        'continue', new_count bookkeeping, and the set_real_work2() call are
        elided in this excerpt, so suites below are incomplete as shown.
        """
            print 'Processing %i shares...' % (len(shares),)
            if share.hash in tracker.shares:
                #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
            #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
        # remember that this peer knows of this chain head (for parent requests)
        if shares and peer is not None:
            peer_heads.setdefault(shares[0].hash, set()).add(peer)
            print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*args.net.CHAIN_LENGTH)
    
    @tracker.verified.added.watch
    # NOTE(review): the 'def _(share):' header for this watcher is elided.
    # When a verified share also satisfies the bitcoin block target, submit
    # it to bitcoind as a full block.
        if share.pow_hash <= share.header['target']:
            if factory.conn.value is not None:
                factory.conn.value.send_block(block=share.as_block(tracker, args.net))
            # (the 'else:' branch marker is elided in this excerpt)
                print 'No bitcoind connection! Erp!'
            print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.header_hash,)
    def p2p_share_hashes(share_hashes, peer):
        """Handle share-hash announcements from a peer; request unknown ones.

        NOTE(review): the 't = time.time()' and 'get_hashes = []' setup lines,
        the 'continue' statements, and the 'if get_hashes:' guard are elided
        in this excerpt, so suites below are incomplete as shown.
        """
        for share_hash in share_hashes:
            if share_hash in tracker.shares:
            # exponential backoff on re-requesting the same hash
            last_request_time, count = requested.get(share_hash, (None, 0))
            if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
            print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
            get_hashes.append(share_hash)
            requested[share_hash] = t, count + 1
        
        # remember that this peer knows of this chain head
        if share_hashes and peer is not None:
            peer_heads.setdefault(share_hashes[0], set()).add(peer)
            peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
    def p2p_get_shares(share_hashes, parents, stops, peer):
        """Serve a peer's getshares request: the named shares plus ancestors.

        NOTE(review): the 'stops = set(stops)'/'shares = []' setup, 'break',
        and 'shares.append(share)' lines are elided in this excerpt.
        """
        # cap total response size: at most ~1000 shares across all requests
        parents = min(parents, 1000//len(share_hashes))
        for share_hash in share_hashes:
            # walk up to 'parents' ancestors of each requested share
            for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                if share.hash in stops:
        print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
        peer.sendShares(shares, full=True)
    print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
    
    # NOTE(review): the 'def parse(x):' header and its try/except around the
    # host:port split are elided in this excerpt.
        ip, port = x.split(':')
        return x, args.net.P2P_PORT
    
    # built-in bootstrap node addresses (the 'nodes = set([' opener is elided)
        ('72.14.191.28', args.net.P2P_PORT),
        ('62.204.197.159', args.net.P2P_PORT),
        ('142.58.248.28', args.net.P2P_PORT),
        ('94.23.34.145', args.net.P2P_PORT),
    # DNS-based bootstrap hosts (the surrounding 'for host in [...]' loop and
    # try/except are elided); failures are logged and ignored
        'dabuttonfactory.com',
            nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            log.err(None, 'Error resolving bootstrap node IP:')
    
    if args.net_name == 'litecoin':
        nodes.add(((yield reactor.resolve('liteco.in')), args.net.P2P_PORT))
    
    # NOTE(review): the 'p2p_node = p2p.Node(' opener and its closing paren
    # are elided in this excerpt.
        current_work=current_work,
        port=args.p2pool_port,
        addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.NAME),
        mode=0 if args.low_bandwidth else 1,
        preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
    # wire the network callbacks defined above into the node
    p2p_node.handle_shares = p2p_shares
    p2p_node.handle_share_hashes = p2p_share_hashes
    p2p_node.handle_get_shares = p2p_get_shares
    
    # send share when the chain changes to their chain
    def work_changed(new_work):
        """Rebroadcast not-yet-shared shares when the best chain changes."""
        #print 'Work changed:', new_work
        for share in tracker.get_chain_known(new_work['best_share_hash']):
            # (an 'if not share.shared...' guard and a 'break' are elided here)
            share_share(share, share.peer)
    current_work.changed.watch(work_changed)
    @defer.inlineCallbacks
    # NOTE(review): the 'def upnp_thread():' header, its 'while True:' loop,
    # and the surrounding try/except are elided in this excerpt. The body
    # periodically tries to forward the p2pool port via UPnP.
            is_lan, lan_ip = yield ipdiscover.get_local_ip()
            pm = yield portmapper.get_port_mapper()
            yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
        except defer.TimeoutError:
            # UPnP timeouts are expected on networks without an IGD; ignore
            if p2pool_init.DEBUG:
                log.err(None, "UPnP error:")
            # jittered retry interval, ~120s on average
            yield deferral.sleep(random.expovariate(1/120))
    
    # start listening for workers with a JSON-RPC server
    print 'Listening for workers on port %i...' % (args.worker_port,)
    
    # merkle_root -> (is_new, share_info, transactions); lets got_response
    # reconnect a returned header with the template it was built from
    merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
    # random per-run tag embedded in coinbase nonces to identify our shares
    run_identifier = struct.pack('<I', random.randrange(2**32))
    
    share_counter = skiplists.CountsSkipList(tracker, run_identifier)
    removed_unstales = set()
    def get_share_counts(doa=False):
        """Count our shares in the current chain vs. stale ones.

        Returns (total, stale) by default; with doa=True returns
        (total, stale_doa, stale_not_doa). NOTE(review): the 'if doa:' line
        selecting between the two returns is elided in this excerpt.
        """
        height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
        # shares of ours that made it into the current best chain
        matching_in_chain = share_counter(current_work.value['best_share_hash'], max(0, height - 1)) | removed_unstales
        shares_in_chain = my_shares & matching_in_chain
        stale_shares = my_shares - matching_in_chain
            stale_doa_shares = stale_shares & doa_shares
            stale_not_doa_shares = stale_shares - stale_doa_shares
            return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
        return len(shares_in_chain) + len(stale_shares), len(stale_shares)
    
    @tracker.verified.removed.watch
    # NOTE(review): the 'def _(share):' header for this watcher is elided.
    # Track our own shares pruned from the chain so they still count as unstale.
        if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
            removed_unstales.add(share.hash)
    def compute(state, payout_script):
        """Build a getwork BlockAttempt for a miner from current p2pool state.

        payout_script is the miner's own script (from their username) or None.
        Raises jsonrpc.Error when p2pool is not ready to hand out real work.
        NOTE(review): several interleaved lines are elided in this excerpt
        (aux_str default, get_stale_frac zero-shares guard, the 'else:' of the
        TRANSITION_TIME branch, call-closing parens, is_new/times setup), so
        suites below are incomplete as shown.
        """
        # node fee: a random worker_fee percent of requests pay to our script
        if payout_script is None or random.uniform(0, 100) < args.worker_fee:
            payout_script = my_script
        if state['best_share_hash'] is None and args.net.PERSIST:
            raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
        if len(p2p_node.peers) == 0 and args.net.PERSIST:
            raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
        if time.time() > current_work2.value['last_update'] + 60:
            raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
        if state['aux_work'] is not None:
            # merged-mining commitment: magic tag + aux chain hash + size/nonce
            aux_str = '\xfa\xbemm' + bitcoin.data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
        # XXX assuming generate_tx is smallish here..
        def get_stale_frac():
            # encode our stale fraction as a doubled 16-bit value for the nonce
            shares, stale_shares = get_share_counts()
            frac = stale_shares/shares
            return 2*struct.pack('<H', int(65535*frac + .5))
        subsidy = current_work2.value['subsidy']
        # new-style share generation after the hard transition time
        if int(time.time() - current_work2.value['clock_offset']) >= p2pool.TRANSITION_TIME:
            timestamp = current_work2.value['time']
            previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
            new_share_info, generate_tx = p2pool.new_generate_transaction(
                previous_share_hash=state['best_share_hash'],
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                new_script=payout_script,
                # donation percentage encoded as a 16-bit fixed-point value
                donation=math.perfect_round(65535*args.donation_percentage/100),
                # clamp our clock to within 60s of the previous share's timestamp
                timestamp=math.clip(int(time.time() - current_work2.value['clock_offset']),
                    (previous_share.timestamp - 60, previous_share.timestamp + 60),
                ) if previous_share is not None else int(time.time() - current_work2.value['clock_offset']),
                stale_frac=(lambda shares, stales:
                    255 if shares == 0 else math.perfect_round(254*stales/shares)
                )(*get_share_counts()),
                block_target=state['target'],
        # (old-style branch; its 'else:' marker is elided in this excerpt)
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                # never go below the median of the last 11 share timestamps
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            share_info, generate_tx = p2pool.generate_transaction(
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                nonce=run_identifier + struct.pack('<H', random.randrange(2**16)) + aux_str + get_stale_frac(),
                block_target=state['target'],
        
        print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin.data.target_to_difficulty((new_share_info if is_new else share_info['share_data'])['target']), (generate_tx['tx_outs'][-1]['value']-subsidy//200)*1e-8, args.net.BITCOIN_SYMBOL, subsidy*1e-8, args.net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
        #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
        #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
        # generated (coinbase) tx goes first; remember the mapping so the
        # submitted header can be matched back to this template
        transactions = [generate_tx] + list(current_work2.value['transactions'])
        merkle_root = bitcoin.data.merkle_hash(transactions)
        merkle_root_to_transactions[merkle_root] = is_new, new_share_info if is_new else share_info, transactions
        # share target (easier than the block target) handed to the miner
        target2 = (new_share_info if is_new else share_info['share_data'])['target']
        times[merkle_root] = time.time()
        #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
        return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
    def got_response(data, user):
        """Process a getwork result submitted by a miner.

        Decodes the header, matches it to its template via the merkle root,
        submits to bitcoind when it meets the block target, forwards merged
        mining proofs, and adds qualifying shares to our chain.
        NOTE(review): this excerpt elides the enclosing try/except, several
        guards ('if xxx is None:', 'if is_new:'/'else:'), the aux_pow dict
        opener, and the tail returning 'good', so suites below are incomplete
        as shown.
        """
            # match up with transactions
            header = bitcoin.getwork.decode_data(data)
            xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
                print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
            is_new, share_info, transactions = xxx
                new_share_info = share_info
            hash_ = bitcoin.data.block_header_type.hash256(header)
            # proof-of-work hash may differ from the block hash (e.g. scrypt)
            pow_hash = args.net.BITCOIN_POW_FUNC(header)
            if pow_hash <= header['target'] or p2pool_init.DEBUG:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                # (the 'else:' branch marker is elided in this excerpt)
                    print 'No bitcoind connection! Erp!'
                if pow_hash <= header['target']:
                    print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
            # merged mining: if the hash beats the aux chain's target, build an
            # aux proof-of-work and submit it to the merged daemon
            if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
                    merkle_branch=[x['hash'] for x in p2pool.calculate_merkle_branch(transactions, 0)],
                    parent_block_header=header,
                # aux hash is embedded in the coinbase script; pair it with the
                # packed aux proof for the getauxblock submission
                a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin.data.aux_pow_type.pack(aux_pow).encode('hex')
                merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                # (the 'def _(res):' callback header is elided in this excerpt)
                    print "MERGED RESULT:", res
                merged.rpc_getauxblock(a, b).addBoth(_)
                log.err(None, 'Error while processing merged mining POW:')
            target = (new_share_info if is_new else share_info['share_data'])['target']
            if pow_hash > target:
                print 'Worker submitted share with hash > target:\nhash : %x\ntarget: %x' % (pow_hash, target)
            # ('if is_new:'/'else:' selecting the share class is elided here)
                share = p2pool.NewShare(args.net, header, new_share_info, other_txs=transactions[1:])
                share = p2pool.Share(args.net, header, share_info, other_txs=transactions[1:])
            my_shares.add(share.hash)
            # a share built on a stale head is dead-on-arrival
            if share.previous_hash != current_work.value['best_share_hash']:
                doa_shares.add(share.hash)
            print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
            good = share.previous_hash == current_work.value['best_share_hash']
            # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
            # eg. good = share.hash == current_work.value['best_share_hash'] here
            log.err(None, 'Error processing data received from worker:')
    web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
    
    # NOTE(review): the 'def get_rate():' header is elided in this excerpt.
    # Returns the pool hashrate (corrected for median stale fraction) as JSON.
        if current_work.value['best_share_hash'] is not None:
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
            fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
            return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
        return json.dumps(None)
    
    # NOTE(review): the 'def get_users():' header and 'res = {}' are elided.
    # Returns each payout script's weight share of the last 720 shares as JSON.
        height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
        for script in sorted(weights, key=lambda s: weights[s]):
            res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
        return json.dumps(res)
    
    class WebInterface(resource.Resource):
        """Twisted web resource serving func()'s result with a fixed MIME type.

        NOTE(review): this excerpt elides the parent-class __init__ call and
        render_GET's return statement.
        """
        def __init__(self, func, mime_type):
            self.func, self.mime_type = func, mime_type
        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
    
    web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
    web_root.putChild('users', WebInterface(get_users, 'application/json'))
    web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
    # (an 'if draw is not None:' guard for this endpoint is elided here)
        web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
    
    reactor.listenTCP(args.worker_port, server.Site(web_root))
    # do new getwork when a block is heard on the p2p interface
    
    def new_block(block_hash):
        # wake the work1 polling loop immediately instead of waiting its timer
        work_updated.happened()
    factory.new_block.watch(new_block)
    
    print 'Started successfully!'
    
    # recompute the best share head whenever new headers are verified
    ht.updated.watch(set_real_work2)
    
    @defer.inlineCallbacks
    # NOTE(review): the 'def work1_thread():' header, its 'while True:' loop
    # and try/except are elided in this excerpt. It re-polls bitcoind either
    # when signalled or on a 1-10s jittered timer.
            flag = work_updated.get_deferred()
                yield set_real_work1()
            yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
    
    @defer.inlineCallbacks
    # NOTE(review): the 'def work2_thread():' header and loop body (calling
    # set_real_work2) are elided in this excerpt; only its sleep is visible.
            yield deferral.sleep(random.expovariate(1/20))
    
    # watchdog: dump a stack trace if the reactor stalls for 30 seconds
    if hasattr(signal, 'SIGALRM'):
        def watchdog_handler(signum, frame):
            print 'Watchdog timer went off at:'
            traceback.print_stack()
        signal.signal(signal.SIGALRM, watchdog_handler)
        # re-arm the 30s alarm every second while the reactor is responsive
        task.LoopingCall(signal.alarm, 30).start(1)
    
    def read_stale_frac(share):
        """Decode the stale fraction a share's creator embedded in it.

        Returns a float in [0, 1], or None when absent/sentinel. NOTE(review):
        this excerpt elides the 'return None' for short nonces and the final
        decode/return of the old-style (nonce-embedded) encoding.
        """
        if isinstance(share, p2pool.NewShare):
            # 255 is the 'unknown' sentinel in new-style shares
            return share.share_data['stale_frac']/254 if share.share_data['stale_frac'] != 255 else None
        if len(share.nonce) < 4:
        # old-style shares store the value doubled in the last 4 nonce bytes
        a, b = struct.unpack("<HH", share.nonce[-4:])
            # NOTE(review): the enclosing 'def status_thread():', 'while True:'
            # and 'try:' are elided in this excerpt; this is its loop body,
            # printing a status line every 3 seconds when it changes.
            yield deferral.sleep(3)
            if time.time() > current_work2.value['last_update'] + 60:
                print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
                weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
                stale_shares = stale_doa_shares + stale_not_doa_shares
                fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
                # NOTE(review): 'str' shadows the builtin here — harmless
                # locally, but worth renaming. Several format arguments are
                # elided from this excerpt.
                str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
                    # pool rate, corrected upward by the median stale fraction
                    math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
                    len(tracker.verified.shares),
                    # our recent weight share of the pool, as a percentage
                    weights.get(my_script, 0)/total_weight*100,
                    math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
                    stale_not_doa_shares,
                ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                # only print when the line actually changed
                if (str != pool_str):
                    # ('if fracs:' guard elided in this excerpt)
                    med = math.median(fracs)
                    print 'Median stale proportion:', med
                    # ('if shares:' guard elided in this excerpt)
                        print ' Own:', stale_shares/shares
                        # ('if med < .99:' guard elided in this excerpt)
                            print ' Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
            # ('except:' for the enclosing try is elided in this excerpt)
                log.err(None, 'Fatal error:')
class FixedArgumentParser(argparse.ArgumentParser):
    """ArgumentParser whose @-file expansion recurses and splits whole lines.

    Overrides the stock _read_args_from_files to allow @-files to reference
    further @-files and to split each line into multiple arguments.
    """
    def _read_args_from_files(self, arg_strings):
        # expand arguments referencing files
        # NOTE(review): this excerpt elides interleaved lines
        # ('new_arg_strings = []', the else:/try:/finally: scaffolding around
        # the file read, and the except/parser-error lines), so suites below
        # are incomplete as shown.
        for arg_string in arg_strings:
            # for regular arguments, just add them back into the list
            if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                new_arg_strings.append(arg_string)
            # replace arguments referencing files with the file content
                args_file = open(arg_string[1:])
                    for arg_line in args_file.read().splitlines():
                        for arg in self.convert_arg_line_to_args(arg_line):
                            arg_strings.append(arg)
                    # recurse so @-files may themselves reference @-files
                    arg_strings = self._read_args_from_files(arg_strings)
                    new_arg_strings.extend(arg_strings)
                # report unreadable @-files as a parser error
                err = sys.exc_info()[1]
        # return the modified argument list
        return new_arg_strings
def convert_arg_line_to_args(self, arg_line):
    """Split one line of an @-file into individual arguments.

    Whitespace-separated tokens become separate arguments. str.split()
    already discards empty fields, so the extra strip() check is purely
    defensive.
    """
    tokens = []
    for token in arg_line.split():
        if token.strip():
            tokens.append(token)
    return tokens
# command-line interface definition; @file arguments are expanded by
# FixedArgumentParser above
parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
parser.add_argument('--version', action='version', version=p2pool_init.__version__)
parser.add_argument('--net',
    help='use specified network (default: bitcoin)',
    action='store', choices=sorted(x for x in p2pool.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
parser.add_argument('--testnet',
    help='''use the network's testnet''',
    action='store_const', const=True, default=False, dest='testnet')
parser.add_argument('--debug',
    help='debugging mode',
    action='store_const', const=True, default=False, dest='debug')
parser.add_argument('-a', '--address',
    help='generate to this address (defaults to requesting one from bitcoind)',
    type=str, action='store', default=None, dest='address')
parser.add_argument('--logfile',
    help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
    type=str, action='store', default=None, dest='logfile')
parser.add_argument('--merged-url',
    help='call getauxblock on this url to get work for merged mining',
    type=str, action='store', default=None, dest='merged_url')
parser.add_argument('--merged-userpass',
    help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
    type=str, action='store', default=None, dest='merged_userpass')
parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
    help='percentage amount to donate to author of p2pool. Default: 0.5',
    type=float, action='store', default=0.5, dest='donation_percentage')

# options for the p2pool peer-to-peer network
p2pool_group = parser.add_argument_group('p2pool interface')
p2pool_group.add_argument('--p2pool-port', metavar='PORT',
    help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
    type=int, action='store', default=None, dest='p2pool_port')
p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
    help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
    type=str, action='append', default=[], dest='p2pool_nodes')
parser.add_argument('-l', '--low-bandwidth',
    help='trade lower bandwidth usage for higher latency (reduced efficiency)',
    action='store_true', default=False, dest='low_bandwidth')
parser.add_argument('--disable-upnp',
    help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
    action='store_false', default=True, dest='upnp')

# options for the miner-facing JSON-RPC interface
worker_group = parser.add_argument_group('worker interface')
worker_group.add_argument('-w', '--worker-port', metavar='PORT',
    help='listen on PORT for RPC connections from miners asking for work and providing responses (default: bitcoin: 9332 namecoin: 9331 ixcoin: 9330 i0coin: 9329 solidcoin: 9328 litecoin: 9327, +10000 for testnets)',
    type=int, action='store', default=None, dest='worker_port')
worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
    help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:9332/fee . default: 0''',
    type=float, action='store', default=0, dest='worker_fee')

# options for talking to the local coin daemon
bitcoind_group = parser.add_argument_group('bitcoind interface')
bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
    help='connect to a bitcoind at this address (default: 127.0.0.1)',
    type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
    help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332 ixcoin: 8338 i0coin: 7332 litecoin: 9332)',
    type=int, action='store', default=None, dest='bitcoind_rpc_port')
bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
    help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 namecoin: 8334 ixcoin: 8337 i0coin: 7333 solidcoin: 7555 litecoin: 9333, +10000 for testnets)',
    type=int, action='store', default=None, dest='bitcoind_p2p_port')

# positional arguments (defined via dest= only): RPC credentials
bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
    help='bitcoind RPC interface username (default: empty)',
    type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
    help='bitcoind RPC interface password',
    type=str, action='store', dest='bitcoind_rpc_password')

args = parser.parse_args()
# (the 'if args.debug:' guard for this assignment is elided in this excerpt)
    p2pool_init.DEBUG = True

# default log file lives next to the script, named after the network
if args.logfile is None:
    args.logfile = os.path.join(os.path.dirname(sys.argv[0]), args.net_name + ('_testnet' if args.testnet else '') + '.log')

class LogFile(object):
    """Append-only log file that can be reopened (for rotation) and truncated.

    NOTE(review): this excerpt elides interleaved lines (the 'def reopen(self):'
    header, 'length = f.tell()', the truncation read/write/close sequence, and
    the 'def flush(self):' header), so suites below are incomplete as shown.
    """
    def __init__(self, filename):
        self.filename = filename
        # file handle; opened lazily by reopen()
        self.inner_file = None
        if self.inner_file is not None:
            self.inner_file.close()
        # ensure the file exists before reading it
        open(self.filename, 'a').close()
        f = open(self.filename, 'rb')
        f.seek(0, os.SEEK_END)
        # when the log exceeds ~100MB, keep only the last ~1MB
        if length > 100*1000*1000:
            f.seek(-1000*1000, os.SEEK_END)
            # skip forward to a newline so the kept tail starts on a line boundary
            if f.read(1) in ('', '\n'):
            f = open(self.filename, 'wb')
        self.inner_file = open(self.filename, 'a')
    def write(self, data):
        self.inner_file.write(data)
        self.inner_file.flush()
class TeePipe(object):
    """File-like object that duplicates writes/flushes to several outputs.

    NOTE(review): this excerpt elides the 'output.write(data)' body, the
    'def flush(self):' header, and 'output.flush()'.
    """
    def __init__(self, outputs):
        self.outputs = outputs
    def write(self, data):
        for output in self.outputs:
    # (flush: forward to every output)
        for output in self.outputs:
class TimestampingPipe(object):
    """File-like wrapper that prefixes each complete line with a timestamp.

    Buffers partial lines until a newline arrives. NOTE(review): this excerpt
    elides the 'self.buf = '...'' initialization, the trailing
    'self.buf = lines[-1]' carry-over, and the flush method.
    """
    def __init__(self, inner_file):
        self.inner_file = inner_file
    def write(self, data):
        # accumulate with any leftover partial line from the previous write
        buf = self.buf + data
        lines = buf.split('\n')
        # emit every complete line with an HH:MM:SS.micro prefix
        for line in lines[:-1]:
            self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
            self.inner_file.flush()
# route stdout, stderr and Twisted's log through the timestamping tee so
# everything appears both on the console and in the log file
logfile = LogFile(args.logfile)
sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
# SIGUSR1 reopens the log file, enabling external log rotation
if hasattr(signal, "SIGUSR1"):
    def sigusr1(signum, frame):
        print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
        # (the 'logfile.reopen()' call is elided in this excerpt)
        print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
    signal.signal(signal.SIGUSR1, sigusr1)
# reopen periodically too, so rotation works even without the signal
task.LoopingCall(logfile.reopen).start(5)
# resolve the network object (testnet variants use a '_testnet' suffix)
args.net = p2pool.nets[args.net_name + ('_testnet' if args.testnet else '')]

# fill in per-network port defaults for anything not given on the command line
if args.bitcoind_rpc_port is None:
    args.bitcoind_rpc_port = args.net.BITCOIN_RPC_PORT

if args.bitcoind_p2p_port is None:
    args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT

if args.p2pool_port is None:
    args.p2pool_port = args.net.P2P_PORT

if args.worker_port is None:
    args.worker_port = args.net.WORKER_PORT

# a fixed -a address overrides asking bitcoind for a payout destination
# (the try:/except around this parse is elided in this excerpt)
if args.address is not None:
        args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        parser.error('error parsing address: ' + repr(e))
# (the 'else:' branch marker is elided in this excerpt)
    args.pubkey_hash = None

# merged mining needs both the url and the credentials, or neither
if (args.merged_url is None) ^ (args.merged_userpass is None):
    parser.error('must specify --merged-url and --merged-userpass')

# start main() once the Twisted reactor is running
reactor.callWhenRunning(main, args)