3 from __future__ import division
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
# Poll bitcoind's getmemorypool RPC and repackage the result as a plain dict:
# block version, previous block hash as an int, decoded transactions, the
# coinbase subsidy, and the compact-bits target. Retried 3 times on failure.
# NOTE(review): this excerpt is missing interior lines (orig. 38, 40-41);
# the dict literal is never closed here -- treat this span as incomplete.
29 @deferral.retry('Error getting work from bitcoind:', 3)
30 @defer.inlineCallbacks
31 def getwork(bitcoind, ht, net):
32 work = yield bitcoind.rpc_getmemorypool()
33 defer.returnValue(dict(
34 version=work['version'],
35 previous_block_hash=int(work['previousblockhash'], 16),
36 transactions=[bitcoin.data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
37 subsidy=work['coinbasevalue'],
# 'bits' may arrive hex-encoded (string) or already numeric, depending on the
# bitcoind version; both forms are normalized to a FloatingInteger target.
39 target=bitcoin.data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin.data.FloatingInteger(work['bits']),
# Ask bitcoind over the bitcoin p2p protocol (checkorder / IP transaction)
# for a script to send payouts to. Returns the script bytes on 'success',
# None when the node denies IP transactions, and raises on anything else.
# NOTE(review): orig. line 50 (presumably the `else:` before the raise) is
# missing from this excerpt.
42 @deferral.retry('Error getting payout script from bitcoind:', 1)
43 @defer.inlineCallbacks
44 def get_payout_script(factory):
45 res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
46 if res['reply'] == 'success':
47 defer.returnValue(res['script'])
48 elif res['reply'] == 'denied':
49 defer.returnValue(None)
51 raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build a payout script from bitcoind's 'p2pool' account address.

    Fallback used when the p2p checkorder route is denied: fetch the address
    of bitcoind's 'p2pool' account over JSON-RPC, convert it to a pubkey hash
    for the given network, and return the matching output script.
    Retried up to 10 times via deferral.retry.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
# Start of main(): verify RPC and p2p connectivity to bitcoind and decide the
# payout script. NOTE(review): the `def main(args):` line itself (orig. 59-60)
# and various interior lines are missing from this excerpt.
58 @defer.inlineCallbacks
61 print 'p2pool (version %s)' % (p2pool_init.__version__,)
67 print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
70 # connect to bitcoind over JSON-RPC and do initial getwork
71 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
72 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
73 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
# Sanity check: confirm we are talking to the right coin daemon for args.net.
74 good = yield deferral.retry('Error while checking bitcoind identity:', 1)(args.net.BITCOIN_RPC_CHECK)(bitcoind)
76 print " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
78 temp_work = yield deferral.retry('Error while testing getwork:', 1)(defer.inlineCallbacks(lambda: defer.returnValue(bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork())))))()
80 print ' Current block hash: %x' % (temp_work.previous_block,)
83 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
84 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
85 factory = bitcoin.p2p.ClientFactory(args.net)
86 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
# Payout script priority: explicit --address (args.pubkey_hash), else the
# checkorder result, else the 'p2pool' account address fallback.
87 my_script = yield get_payout_script(factory)
88 if args.pubkey_hash is None:
90 print ' IP transaction denied ... falling back to sending to address.'
91 my_script = yield get_payout_script2(bitcoind, args.net)
93 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
95 print ' Payout script:', bitcoin.data.script2_to_human(my_script, args.net)
98 print 'Loading cached block headers...'
99 ht = bitcoin.p2p.HeightTracker(factory, args.net.NAME + '_headers.dat')
100 print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
# Load the on-disk share store into the tracker, replay verified-share hashes,
# and wire tracker add/remove events back to the store so it stays in sync.
# NOTE(review): interior lines are missing from this excerpt (e.g. the loop's
# mode dispatch around orig. 109/111/119).
103 tracker = p2pool.OkayTracker(args.net)
104 shared_share_hashes = set()
105 ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.NAME + '_shares.'), args.net)
106 known_verified = set()
107 print "Loading shares..."
108 for i, (mode, contents) in enumerate(ss.get_shares()):
110 if contents.hash in tracker.shares:
# Shares loaded from disk were already shared with peers in a past run.
112 shared_share_hashes.add(contents.hash)
113 contents.time_seen = 0
114 tracker.add(contents)
115 if len(tracker.shares) % 1000 == 0 and tracker.shares:
116 print " %i" % (len(tracker.shares),)
117 elif mode == 'verified_hash':
118 known_verified.add(contents)
120 raise AssertionError()
121 print " ...inserting %i verified shares..." % (len(known_verified),)
22 for h in known_verified:
123 if h not in tracker.shares:
# Verified hash with no matching share -- drop it from the store.
124 ss.forget_verified_share(h)
126 tracker.verified.add(tracker.shares[h])
127 print " ...done loading %i shares!" % (len(tracker.shares),)
# Keep the share store mirroring the tracker from now on.
129 tracker.added.watch(lambda share: ss.add_share(share))
130 tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
131 tracker.removed.watch(lambda share: ss.forget_share(share.hash))
132 tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
133 tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
135 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
137 # information affecting work that should trigger a long-polling update
138 current_work = variable.Variable(None)
139 # information affecting work that should not trigger a long-polling update
140 current_work2 = variable.Variable(None)
142 work_updated = variable.Event()
# share_hash -> (last_request_time, count); throttles re-requests of parents.
144 requested = expiring_dict.ExpiringDict(300)
# Refresh current_work/current_work2 from bitcoind. current_work carries the
# long-poll-triggering fields; current_work2 carries fields (transactions,
# subsidy, clock offset) that should NOT wake long-pollers.
# NOTE(review): excerpt is missing interior lines (orig. 156, 158, 163+);
# neither dict literal is closed here.
146 @defer.inlineCallbacks
147 def set_real_work1():
148 work = yield getwork(bitcoind, ht, args.net)
149 changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
150 current_work.set(dict(
151 version=work['version'],
152 previous_block=work['previous_block_hash'],
153 target=work['target'],
# best_share_hash / aux_work are owned by set_real_work2 / set_merged_work;
# carry the previous values forward here.
154 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
155 aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
157 current_work2.set(dict(
159 transactions=work['transactions'],
160 subsidy=work['subsidy'],
161 clock_offset=time.time() - work['time'],
162 last_update=time.time(),
# Recompute the best share to build on and request any desired-but-missing
# parent shares from peers, with exponential re-request backoff via
# `requested`. NOTE(review): many interior lines are missing; in particular
# orig. 177 presumably rebinds t = time.time() before the comparison on
# orig. 179 -- as shown, t is still the work dict. Do not assume this span
# is complete.
167 def set_real_work2():
168 best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
170 t = dict(current_work.value)
171 t['best_share_hash'] = best
175 for peer2, share_hash in desired:
176 if share_hash not in tracker.tails: # was received in the time tracker.think was running
178 last_request_time, count = requested.get(share_hash, (None, 0))
# Skip if we asked recently; the allowed gap grows 1.5x per attempt.
179 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
181 potential_peers = set()
182 for head in tracker.tails[share_hash]:
183 potential_peers.update(peer_heads.get(head, set()))
184 potential_peers = [peer for peer in potential_peers if peer.connected2]
185 if count == 0 and peer2 is not None and peer2.connected2:
# Mostly pick a random knowledgeable peer; sometimes fall back to peer2.
188 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
192 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
# Stop the download at our existing heads (and just past them) so peers
# don't resend chains we already have.
196 stops=list(set(tracker.heads) | set(
197 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
200 requested[share_hash] = t, count + 1
# Prime the work variables, then run a loop that polls the merged-mining
# daemon's getauxblock once a second and publishes the aux work into
# current_work. NOTE(review): loop scaffolding lines are missing from this
# excerpt (e.g. the `while True:` and the dict close around orig. 220-222).
202 print 'Initializing work...'
203 yield set_real_work1()
208 @defer.inlineCallbacks
209 def set_merged_work():
# No merged mining configured: nothing to do.
210 if not args.merged_url:
213 merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
214 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
215 x = dict(current_work.value)
216 x['aux_work'] = dict(
217 hash=int(auxblock['hash'], 16),
218 target=bitcoin.data.HashType().unpack(auxblock['target'].decode('hex')),
219 chain_id=auxblock['chainid'],
223 yield deferral.sleep(1)
# Wall-clock start time corrected by the measured bitcoind clock offset.
226 start_time = time.time() - current_work2.value['clock_offset']
# P2P share handling: broadcast a share to all peers except its source,
# ingest batches of shares received from peers, and submit any verified
# share that also meets the block target to bitcoind as a full block.
# NOTE(review): interior lines are missing throughout (loop bodies,
# `continue`s, the watcher's def line at orig. 265).
228 # setup p2p logic and join p2pool network
230 def share_share(share, ignore_peer=None):
231 for peer in p2p_node.peers.itervalues():
232 if peer is ignore_peer:
234 #if p2pool_init.DEBUG:
235 # print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
236 peer.sendShares([share])
237 shared_share_hashes.add(share.hash)
239 def p2p_shares(shares, peer=None):
241 print 'Processing %i shares...' % (len(shares),)
245 if share.hash in tracker.shares:
246 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
251 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
# Remember that this peer knows about this chain head for later requests.
255 if shares and peer is not None:
256 peer_heads.setdefault(shares[0].hash, set()).add(peer)
262 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*args.net.CHAIN_LENGTH)
# Watcher on newly-verified shares: a share whose pow also beats the block
# target IS a block -- forward it to bitcoind.
264 @tracker.verified.added.watch
266 if share.pow_hash <= share.header['target']:
267 if factory.conn.value is not None:
268 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
270 print 'No bitcoind connection! Erp!'
272 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.header_hash,)
# Handle advertised share hashes (request unknown ones, rate-limited via
# `requested`) and serve getshares requests by walking each requested chain
# until a stop hash. NOTE(review): missing lines include the binding of `t`
# (time.time()) and `get_hashes = []` before their first uses, and the
# `shares` accumulator in p2p_get_shares.
275 def p2p_share_hashes(share_hashes, peer):
278 for share_hash in share_hashes:
279 if share_hash in tracker.shares:
281 last_request_time, count = requested.get(share_hash, (None, 0))
282 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
284 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
285 get_hashes.append(share_hash)
286 requested[share_hash] = t, count + 1
288 if share_hashes and peer is not None:
289 peer_heads.setdefault(share_hashes[0], set()).add(peer)
291 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
293 def p2p_get_shares(share_hashes, parents, stops, peer):
# Cap total parents served so one request can't return more than ~1000 shares.
294 parents = min(parents, 1000//len(share_hashes))
297 for share_hash in share_hashes:
298 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
299 if share.hash in stops:
302 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
303 peer.sendShares(shares, full=True)
# Join the p2pool network: resolve hard-coded and DNS bootstrap nodes, build
# the p2p.Node (peer addresses persisted in a SQLite-backed dict), install
# the share handlers above, and re-broadcast our chain when the best share
# changes. NOTE(review): missing lines include the parse() def line, the
# nodes set literal open/close, the Node(...) constructor line, and
# node start/listen calls.
305 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
# parse(): split an optional ':port' suffix off a bootstrap address; bare
# hostnames default to the network's standard P2P port.
309 ip, port = x.split(':')
312 return x, args.net.P2P_PORT
315 ('72.14.191.28', args.net.P2P_PORT),
316 ('62.204.197.159', args.net.P2P_PORT),
317 ('142.58.248.28', args.net.P2P_PORT),
318 ('94.23.34.145', args.net.P2P_PORT),
322 'dabuttonfactory.com',
325 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
# Bootstrap resolution is best-effort; failures are only logged.
327 log.err(None, 'Error resolving bootstrap node IP:')
329 if args.net_name == 'litecoin':
330 nodes.add(((yield reactor.resolve('liteco.in')), args.net.P2P_PORT))
333 current_work=current_work,
334 port=args.p2pool_port,
336 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.NAME),
337 mode=0 if args.low_bandwidth else 1,
338 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
340 p2p_node.handle_shares = p2p_shares
341 p2p_node.handle_share_hashes = p2p_share_hashes
342 p2p_node.handle_get_shares = p2p_get_shares
346 # send share when the chain changes to their chain
347 def work_changed(new_work):
348 #print 'Work changed:', new_work
349 for share in tracker.get_chain_known(new_work['best_share_hash']):
350 if share.hash in shared_share_hashes:
352 share_share(share, share.peer)
353 current_work.changed.watch(work_changed)
# Best-effort UPnP port forwarding of the p2pool port, retried in a loop with
# randomized (exponential-distribution) sleeps. Errors only print in DEBUG.
# NOTE(review): the def line, while-loop and try lines are missing from this
# excerpt.
358 @defer.inlineCallbacks
362 is_lan, lan_ip = yield ipdiscover.get_local_ip()
364 pm = yield portmapper.get_port_mapper()
365 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
366 except defer.TimeoutError:
369 if p2pool_init.DEBUG:
370 log.err(None, "UPnP error:")
371 yield deferral.sleep(random.expovariate(1/120))
# Worker-interface state: maps merkle roots of handed-out work back to their
# transactions, tags this run's shares with a random run_identifier, and
# counts our own shares vs. stale (not-in-chain) shares.
# NOTE(review): orig. 392 (presumably `if doa:`) is missing, which is why two
# return statements appear back to back -- the 3-tuple return is the doa=True
# branch, the 2-tuple return the default. Do not assume this span is complete.
376 # start listening for workers with a JSON-RPC server
378 print 'Listening for workers on port %i...' % (args.worker_port,)
382 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
# Random per-run tag embedded in our coinbase nonces to recognize our shares.
383 run_identifier = struct.pack('<I', random.randrange(2**32))
385 share_counter = skiplists.CountsSkipList(tracker, run_identifier)
386 removed_unstales = set()
387 def get_share_counts(doa=False):
388 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
389 matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
390 shares_in_chain = my_shares & matching_in_chain
391 stale_shares = my_shares - matching_in_chain
393 stale_doa_shares = stale_shares & doa_shares
394 stale_not_doa_shares = stale_shares - stale_doa_shares
395 return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
396 return len(shares_in_chain) + len(stale_shares), len(stale_shares)
# A verified share of ours that leaves the tracker but was in the best chain
# still counts as non-stale; remember it.
397 @tracker.verified.removed.watch
399 if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
400 removed_unstales.add(share.hash)
# Build a getwork BlockAttempt for a miner: choose the payout script (the
# node operator's script wins with probability worker_fee%), refuse work when
# disconnected/out of date, construct the generate (coinbase) transaction --
# via new_generate_transaction after TRANSITION_TIME, else the legacy
# generate_transaction -- then cache the transaction set by merkle root so
# got_response() can reassemble the block.
# NOTE(review): many interior lines are missing (aux_str default, is_new
# assignment, several keyword arguments, closing parens); documented as-is.
402 def compute(state, payout_script):
# worker_fee% of requests are redirected to the node operator's script.
403 if payout_script is None or random.uniform(0, 100) < args.worker_fee:
404 payout_script = my_script
405 if state['best_share_hash'] is None and args.net.PERSIST:
406 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
407 if len(p2p_node.peers) == 0 and args.net.PERSIST:
408 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
409 if time.time() > current_work2.value['last_update'] + 60:
410 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
# Merged-mining commitment: magic bytes + aux chain hash + struct tail,
# spliced into the coinbase nonce below.
412 if state['aux_work'] is not None:
413 aux_str = '\xfa\xbemm' + bitcoin.data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
417 # XXX assuming generate_tx is smallish here..
418 def get_stale_frac():
419 shares, stale_shares = get_share_counts()
422 frac = stale_shares/shares
# Encode the stale fraction as a doubled little-endian uint16.
423 return 2*struct.pack('<H', int(65535*frac + .5))
424 subsidy = current_work2.value['subsidy']
# After the hard-fork TRANSITION_TIME, use the new share format/generator.
427 if int(time.time() - current_work2.value['clock_offset']) >= p2pool.TRANSITION_TIME:
428 timestamp = current_work2.value['time']
430 previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
431 new_share_info, generate_tx = p2pool.new_generate_transaction(
434 previous_share_hash=state['best_share_hash'],
436 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
437 new_script=payout_script,
439 donation=math.perfect_round(65535*args.donation_percentage/100),
# stale_frac byte: 255 is the "no shares yet" sentinel.
440 stale_frac=(lambda shares, stales:
441 255 if shares == 0 else math.perfect_round(254*stales/shares)
442 )(*get_share_counts()),
444 block_target=state['target'],
445 desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
# Legacy path: enforce median-time-past +1 as the timestamp floor.
449 timestamp = int(time.time() - current_work2.value['clock_offset'])
450 if state['best_share_hash'] is not None:
451 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
452 if timestamp2 > timestamp:
453 print 'Toff', timestamp2 - timestamp
454 timestamp = timestamp2
456 share_info, generate_tx = p2pool.generate_transaction(
458 previous_share_hash=state['best_share_hash'],
459 new_script=payout_script,
461 nonce=run_identifier + struct.pack('<H', random.randrange(2**16)) + aux_str + get_stale_frac(),
462 block_target=state['target'],
466 print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin.data.target_to_difficulty((new_share_info if is_new else share_info['share_data'])['target']), (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) -subsidy//200)*1e-8, args.net.BITCOIN_SYMBOL, subsidy*1e-8, args.net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
467 #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
468 #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
469 transactions = [generate_tx] + list(current_work2.value['transactions'])
470 merkle_root = bitcoin.data.merkle_hash(transactions)
# Remember the full tx set so the miner's response can be matched back up.
471 merkle_root_to_transactions[merkle_root] = is_new, new_share_info if is_new else share_info, transactions
473 target2 = (new_share_info if is_new else share_info['share_data'])['target']
474 times[merkle_root] = time.time()
475 #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
476 return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
# Handle a miner's solved getwork: decode the header, look its merkle root up
# in merkle_root_to_transactions, submit to bitcoind if it beats the block
# target, submit merged-mining proof if it beats the aux target, then wrap it
# as a p2pool share if it beats the share target.
# NOTE(review): many interior lines are missing (try/except scaffolding, the
# `if xxx is None` guard, the aux_pow dict open, the is_new branch line, the
# final return); documented as-is, do not assume completeness.
482 def got_response(data, user):
484 # match up with transactions
485 header = bitcoin.getwork.decode_data(data)
486 xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
488 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
490 is_new, share_info, transactions = xxx
491 new_share_info = share_info
493 hash_ = bitcoin.data.block_header_type.hash256(header)
# Proof-of-work hash may differ from the block hash (e.g. scrypt coins).
495 pow_hash = args.net.BITCOIN_POW_FUNC(header)
# In DEBUG mode every response is forwarded to bitcoind, not just solutions.
497 if pow_hash <= header['target'] or p2pool_init.DEBUG:
498 if factory.conn.value is not None:
499 factory.conn.value.send_block(block=dict(header=header, txs=transactions))
501 print 'No bitcoind connection! Erp!'
502 if pow_hash <= header['target']:
504 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
# Merged mining: build the aux proof-of-work and hand it to the aux daemon.
507 if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
513 merkle_branch=[x['hash'] for x in p2pool.calculate_merkle_branch(transactions, 0)],
518 parent_block_header=header,
# a: the aux hash spliced into the coinbase script; b: the packed aux pow.
521 a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin.data.aux_pow_type.pack(aux_pow).encode('hex')
523 merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
525 print "MERGED RESULT:", res
526 merged.rpc_getauxblock(a, b).addBoth(_)
528 log.err(None, 'Error while processing merged mining POW:')
530 target = (new_share_info if is_new else share_info['share_data'])['target']
531 if pow_hash > target:
532 print 'Worker submitted share with hash > target:\nhash : %x\ntarget: %x' % (pow_hash, target)
535 share = p2pool.NewShare(args.net, header, new_share_info, other_txs=transactions[1:])
537 share = p2pool.Share(args.net, header, share_info, other_txs=transactions[1:])
538 my_shares.add(share.hash)
# A share not building on the current best share is dead on arrival.
539 if share.previous_hash != current_work.value['best_share_hash']:
540 doa_shares.add(share.hash)
541 print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
542 good = share.previous_hash == current_work.value['best_share_hash']
543 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
545 # eg. good = share.hash == current_work.value['best_share_hash'] here
548 log.err(None, 'Error processing data received from worker:')
# Stand up the worker-facing HTTP server: getwork handling via
# WorkerInterface plus small JSON status endpoints (/rate, /users, /fee and,
# when visualizations are available, /chain_img). Also hooks new-block
# notifications from bitcoind's p2p connection into work_updated.
# NOTE(review): the def lines for get_rate/get_users, `res = {}`, and
# WebInterface.render_GET's return (orig. 575-576) are missing from this
# excerpt.
551 web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
# get_rate: pool hashrate over the last ~720 shares, corrected upward by the
# recent median stale fraction.
554 if current_work.value['best_share_hash'] is not None:
555 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
556 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
557 fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
558 return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
559 return json.dumps(None)
# get_users: payout-weight fraction per human-readable payout script.
562 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
563 weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
565 for script in sorted(weights, key=lambda s: weights[s]):
566 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
567 return json.dumps(res)
# Tiny twisted.web resource that serves func() with a fixed MIME type.
569 class WebInterface(resource.Resource):
570 def __init__(self, func, mime_type):
571 self.func, self.mime_type = func, mime_type
573 def render_GET(self, request):
574 request.setHeader('Content-Type', self.mime_type)
577 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
578 web_root.putChild('users', WebInterface(get_users, 'application/json'))
579 web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
581 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
583 reactor.listenTCP(args.worker_port, server.Site(web_root))
590 # do new getwork when a block is heard on the p2p interface
592 def new_block(block_hash):
593 work_updated.happened()
594 factory.new_block.watch(new_block)
596 print 'Started successfully!'
599 ht.updated.watch(set_real_work2)
# Background loops: re-poll bitcoind for work (woken early by work_updated),
# an auxiliary sleep loop, a SIGALRM watchdog that dumps a stack trace if the
# reactor stalls, and a periodic status printout of pool/own hashrate and
# stale statistics. NOTE(review): the loop def lines, `while True:`/try
# scaffolding and several format arguments are missing from this excerpt.
# NOTE(review): `str` on orig. 647 shadows the builtin -- worth renaming when
# this block is next touched (can't change code in a doc-only pass).
601 @defer.inlineCallbacks
# Wait for either a new-block event or a randomized 1-10s poll interval.
604 flag = work_updated.get_deferred()
606 yield set_real_work1()
609 yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
611 @defer.inlineCallbacks
618 yield deferral.sleep(random.expovariate(1/20))
# Watchdog: LoopingCall re-arms alarm(30) every second; if the reactor is
# blocked >30s the handler fires and prints where.
624 if hasattr(signal, 'SIGALRM'):
625 def watchdog_handler(signum, frame):
626 print 'Watchdog timer went off at:'
627 traceback.print_stack()
629 signal.signal(signal.SIGALRM, watchdog_handler)
630 task.LoopingCall(signal.alarm, 30).start(1)
635 yield deferral.sleep(3)
637 if time.time() > current_work2.value['last_update'] + 60:
638 print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
639 if current_work.value['best_share_hash'] is not None:
640 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
642 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
643 weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
644 shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
645 stale_shares = stale_doa_shares + stale_not_doa_shares
646 fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
647 str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
648 math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
650 len(tracker.verified.shares),
652 weights.get(my_script, 0)/total_weight*100,
653 math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
655 stale_not_doa_shares,
658 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
# Only print when the status line actually changed.
659 if (str != pool_str):
663 med = math.median(fracs)
664 print 'Median stale proportion:', med
666 print ' Own:', stale_shares/shares
668 print ' Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
674 log.err(None, 'Fatal error:')
# ArgumentParser subclass that fixes @file argument expansion: file contents
# are themselves expanded recursively, and each line may hold several
# whitespace-separated arguments (stock argparse treats a line as one arg).
# NOTE(review): _read_args_from_files is missing many interior lines here
# (initialization of new_arg_strings, try/except and file-close scaffolding,
# the error path around orig. 700-705).
679 class FixedArgumentParser(argparse.ArgumentParser):
680 def _read_args_from_files(self, arg_strings):
681 # expand arguments referencing files
683 for arg_string in arg_strings:
685 # for regular arguments, just add them back into the list
686 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
687 new_arg_strings.append(arg_string)
689 # replace arguments referencing files with the file content
692 args_file = open(arg_string[1:])
695 for arg_line in args_file.read().splitlines():
696 for arg in self.convert_arg_line_to_args(arg_line):
697 arg_strings.append(arg)
# Recurse so an @file may itself reference further @files.
698 arg_strings = self._read_args_from_files(arg_strings)
699 new_arg_strings.extend(arg_strings)
703 err = sys.exc_info()[1]
706 # return the modified argument list
707 return new_arg_strings
# Split each line of an @file on whitespace, dropping empty tokens.
709 def convert_arg_line_to_args(self, arg_line):
710 return [arg for arg in arg_line.split() if arg.strip()]
# Command-line interface definition: general options, then p2pool-network,
# worker-interface and bitcoind-connection argument groups. Most port-like
# options default to None and are filled in from the selected network's
# constants after parsing. NOTE(review): a few lines are missing from this
# excerpt (e.g. orig. 771, likely the BITCOIND_RPCUSER positional's
# preceding line).
712 parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
713 parser.add_argument('--version', action='version', version=p2pool_init.__version__)
714 parser.add_argument('--net',
715 help='use specified network (default: bitcoin)',
716 action='store', choices=sorted(x for x in p2pool.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
717 parser.add_argument('--testnet',
718 help='''use the network's testnet''',
719 action='store_const', const=True, default=False, dest='testnet')
720 parser.add_argument('--debug',
721 help='debugging mode',
722 action='store_const', const=True, default=False, dest='debug')
723 parser.add_argument('-a', '--address',
724 help='generate to this address (defaults to requesting one from bitcoind)',
725 type=str, action='store', default=None, dest='address')
726 parser.add_argument('--logfile',
727 help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
728 type=str, action='store', default=None, dest='logfile')
729 parser.add_argument('--merged-url',
730 help='call getauxblock on this url to get work for merged mining',
731 type=str, action='store', default=None, dest='merged_url')
732 parser.add_argument('--merged-userpass',
733 help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
734 type=str, action='store', default=None, dest='merged_userpass')
735 parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
736 help='percentage amount to donate to author of p2pool. Default: 0.5',
737 type=float, action='store', default=0.5, dest='donation_percentage')
739 p2pool_group = parser.add_argument_group('p2pool interface')
740 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
741 help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
742 type=int, action='store', default=None, dest='p2pool_port')
743 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
744 help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
745 type=str, action='append', default=[], dest='p2pool_nodes')
746 parser.add_argument('-l', '--low-bandwidth',
747 help='trade lower bandwidth usage for higher latency (reduced efficiency)',
748 action='store_true', default=False, dest='low_bandwidth')
749 parser.add_argument('--disable-upnp',
750 help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
751 action='store_false', default=True, dest='upnp')
753 worker_group = parser.add_argument_group('worker interface')
754 worker_group.add_argument('-w', '--worker-port', metavar='PORT',
755 help='listen on PORT for RPC connections from miners asking for work and providing responses (default: bitcoin: 9332 namecoin: 9331 ixcoin: 9330 i0coin: 9329 solidcoin: 9328 litecoin: 9327, +10000 for testnets)',
756 type=int, action='store', default=None, dest='worker_port')
757 worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
758 help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:9332/fee . default: 0''',
759 type=float, action='store', default=0, dest='worker_fee')
761 bitcoind_group = parser.add_argument_group('bitcoind interface')
762 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
763 help='connect to a bitcoind at this address (default: 127.0.0.1)',
764 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
765 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
766 help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332 ixcoin: 8338 i0coin: 7332 litecoin: 9332)',
767 type=int, action='store', default=None, dest='bitcoind_rpc_port')
768 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
769 help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 namecoin: 8334 ixcoin: 8337 i0coin: 7333 solidcoin: 7555 litecoin: 9333, +10000 for testnets)',
770 type=int, action='store', default=None, dest='bitcoind_p2p_port')
# Positional credentials: username is optional (defaults empty), password is
# required.
772 bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
773 help='bitcoind RPC interface username (default: empty)',
774 type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
775 bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
776 help='bitcoind RPC interface password',
777 type=str, action='store', dest='bitcoind_rpc_password')
# Parse arguments and set up logging: LogFile (reopenable, self-truncating
# once past 100MB), TeePipe (fan-out to several streams), TimestampingPipe
# (prefix each line with a wall-clock timestamp); stdout/stderr and Twisted's
# log observer are all redirected through the combination. SIGUSR1 and a
# 5-second LoopingCall both reopen the log file (log-rotation friendly).
# NOTE(review): all three classes are missing interior lines here (reopen()'s
# def line and truncation tail, TeePipe.flush, TimestampingPipe's buf
# init/carry); documented as-is.
779 args = parser.parse_args()
782 p2pool_init.DEBUG = True
784 if args.logfile is None:
785 args.logfile = os.path.join(os.path.dirname(sys.argv[0]), args.net_name + ('_testnet' if args.testnet else '') + '.log')
787 class LogFile(object):
788 def __init__(self, filename):
789 self.filename = filename
# File handle is opened lazily by reopen(); None until then.
790 self.inner_file = None
793 if self.inner_file is not None:
794 self.inner_file.close()
# Touch the file so the size check below always has something to read.
795 open(self.filename, 'a').close()
796 f = open(self.filename, 'rb')
797 f.seek(0, os.SEEK_END)
# Over 100MB: keep roughly the last 1MB, aligned to a line boundary.
799 if length > 100*1000*1000:
800 f.seek(-1000*1000, os.SEEK_END)
802 if f.read(1) in ('', '\n'):
806 f = open(self.filename, 'wb')
809 self.inner_file = open(self.filename, 'a')
810 def write(self, data):
811 self.inner_file.write(data)
813 self.inner_file.flush()
814 class TeePipe(object):
815 def __init__(self, outputs):
816 self.outputs = outputs
817 def write(self, data):
818 for output in self.outputs:
821 for output in self.outputs:
823 class TimestampingPipe(object):
824 def __init__(self, inner_file):
825 self.inner_file = inner_file
828 def write(self, data):
# Buffer partial lines; only complete lines get a timestamp prefix.
829 buf = self.buf + data
830 lines = buf.split('\n')
831 for line in lines[:-1]:
832 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
833 self.inner_file.flush()
837 logfile = LogFile(args.logfile)
838 sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
839 if hasattr(signal, "SIGUSR1"):
840 def sigusr1(signum, frame):
841 print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
843 print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
844 signal.signal(signal.SIGUSR1, sigusr1)
845 task.LoopingCall(logfile.reopen).start(5)
# Resolve the network object and fill port defaults from its constants,
# convert --address to a pubkey hash, validate the merged-mining option pair
# (XOR: specifying exactly one of the two is an error), and schedule main()
# on the reactor. NOTE(review): the try/except around the address conversion
# (orig. 862/864) and the else for pubkey_hash=None (orig. 866) are missing
# from this excerpt.
847 args.net = p2pool.nets[args.net_name + ('_testnet' if args.testnet else '')]
849 if args.bitcoind_rpc_port is None:
850 args.bitcoind_rpc_port = args.net.BITCOIN_RPC_PORT
852 if args.bitcoind_p2p_port is None:
853 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
855 if args.p2pool_port is None:
856 args.p2pool_port = args.net.P2P_PORT
858 if args.worker_port is None:
859 args.worker_port = args.net.WORKER_PORT
861 if args.address is not None:
863 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
865 parser.error('error parsing address: ' + repr(e))
867 args.pubkey_hash = None
869 if (args.merged_url is None) ^ (args.merged_userpass is None):
870 parser.error('must specify --merged-url and --merged-userpass')
872 reactor.callWhenRunning(main, args)