1 from __future__ import division
15 from twisted.internet import defer, error, reactor, protocol, task
16 from twisted.web import server, resource
17 from twisted.python import log
18 from nattraverso import portmapper, ipdiscover
20 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
21 from bitcoin import worker_interface
22 from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
23 from . import p2p, networks, graphs
24 import p2pool, p2pool.data as p2pool_data
26 @deferral.retry('Error getting work from bitcoind:', 3)
27 @defer.inlineCallbacks
28 def getwork(bitcoind):
29 work = yield bitcoind.rpc_getmemorypool()
30 defer.returnValue(dict(
31 version=work['version'],
32 previous_block_hash=int(work['previousblockhash'], 16),
33 transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
34 subsidy=work['coinbasevalue'],
36 bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
37 coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
40 @defer.inlineCallbacks
41 def main(args, net, datadir_path):
43 print 'p2pool (version %s)' % (p2pool.__version__,)
49 print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
52 # connect to bitcoind over JSON-RPC and do initial getmemorypool
53 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
54 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
55 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
56 good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
58 print >>sys.stderr, " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
60 temp_work = yield getwork(bitcoind)
62 print ' Current block hash: %x' % (temp_work['previous_block_hash'],)
65 # connect to bitcoind over bitcoin-p2p
66 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
67 factory = bitcoin_p2p.ClientFactory(net.PARENT)
68 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
69 yield factory.getProtocol() # waits until handshake is successful
73 if args.pubkey_hash is None:
74 print 'Getting payout address from bitcoind...'
75 my_script = yield deferral.retry('Error getting payout address from bitcoind:', 5)(defer.inlineCallbacks(lambda: defer.returnValue(
76 bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash((yield bitcoind.rpc_getaccountaddress('p2pool')), net.PARENT)))
79 print 'Computing payout script from provided address....'
80 my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
82 print ' Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
85 ht = bitcoin_p2p.HeightTracker(bitcoind, factory)
87 my_share_hashes = set()
88 my_doa_share_hashes = set()
90 tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
91 shared_share_hashes = set()
92 ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
93 known_verified = set()
95 print "Loading shares..."
96 for i, (mode, contents) in enumerate(ss.get_shares()):
98 if contents.hash in tracker.shares:
100 shared_share_hashes.add(contents.hash)
101 contents.time_seen = 0
102 tracker.add(contents)
103 if len(tracker.shares) % 1000 == 0 and tracker.shares:
104 print " %i" % (len(tracker.shares),)
105 elif mode == 'verified_hash':
106 known_verified.add(contents)
108 raise AssertionError()
109 print " ...inserting %i verified shares..." % (len(known_verified),)
110 for h in known_verified:
111 if h not in tracker.shares:
112 ss.forget_verified_share(h)
114 tracker.verified.add(tracker.shares[h])
115 print " ...done loading %i shares!" % (len(tracker.shares),)
117 tracker.removed.watch(lambda share: ss.forget_share(share.hash))
118 tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
119 tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
121 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
123 pre_current_work = variable.Variable(None)
124 pre_merged_work = variable.Variable(None)
125 # information affecting work that should trigger a long-polling update
126 current_work = variable.Variable(None)
127 # information affecting work that should not trigger a long-polling update
128 current_work2 = variable.Variable(None)
130 requested = expiring_dict.ExpiringDict(300)
132 @defer.inlineCallbacks
133 def set_real_work1():
134 work = yield getwork(bitcoind)
135 current_work2.set(dict(
137 transactions=work['transactions'],
138 subsidy=work['subsidy'],
139 clock_offset=time.time() - work['time'],
140 last_update=time.time(),
141 )) # second set first because everything hooks on the first
142 pre_current_work.set(dict(
143 version=work['version'],
144 previous_block=work['previous_block_hash'],
146 coinbaseflags=work['coinbaseflags'],
149 def set_real_work2():
150 best, desired = tracker.think(ht, pre_current_work.value['previous_block'])
152 t = dict(pre_current_work.value)
153 t['best_share_hash'] = best
154 t['aux_work'] = pre_merged_work.value
158 for peer2, share_hash in desired:
159 if share_hash not in tracker.tails: # was received in the time tracker.think was running
161 last_request_time, count = requested.get(share_hash, (None, 0))
162 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
164 potential_peers = set()
165 for head in tracker.tails[share_hash]:
166 potential_peers.update(peer_heads.get(head, set()))
167 potential_peers = [peer for peer in potential_peers if peer.connected2]
168 if count == 0 and peer2 is not None and peer2.connected2:
171 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
175 print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
179 stops=list(set(tracker.heads) | set(
180 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
183 requested[share_hash] = t, count + 1
184 pre_current_work.changed.watch(lambda _: set_real_work2())
186 print 'Initializing work...'
187 yield set_real_work1()
191 pre_merged_work.changed.watch(lambda _: set_real_work2())
192 ht.updated.watch(set_real_work2)
194 merged_proxy = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,)) if args.merged_url else None
196 @defer.inlineCallbacks
197 def set_merged_work():
199 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
200 pre_merged_work.set(dict(
201 hash=int(auxblock['hash'], 16),
202 target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
203 chain_id=auxblock['chainid'],
205 yield deferral.sleep(1)
206 if merged_proxy is not None:
209 @pre_merged_work.changed.watch
210 def _(new_merged_work):
211 print "Got new merged mining work! Difficulty: %f" % (bitcoin_data.target_to_difficulty(new_merged_work['target']),)
213 # setup p2p logic and join p2pool network
215 class Node(p2p.Node):
216 def handle_shares(self, shares, peer):
218 print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
222 if share.hash in tracker.shares:
223 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
228 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
232 if shares and peer is not None:
233 peer_heads.setdefault(shares[0].hash, set()).add(peer)
239 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
241 def handle_share_hashes(self, hashes, peer):
244 for share_hash in hashes:
245 if share_hash in tracker.shares:
247 last_request_time, count = requested.get(share_hash, (None, 0))
248 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
250 print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
251 get_hashes.append(share_hash)
252 requested[share_hash] = t, count + 1
254 if hashes and peer is not None:
255 peer_heads.setdefault(hashes[0], set()).add(peer)
257 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
259 def handle_get_shares(self, hashes, parents, stops, peer):
260 parents = min(parents, 1000//len(hashes))
263 for share_hash in hashes:
264 for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
265 if share.hash in stops:
268 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
269 peer.sendShares(shares)
271 @tracker.verified.added.watch
273 if share.pow_hash <= share.header['bits'].target:
274 if factory.conn.value is not None:
275 factory.conn.value.send_block(block=share.as_block(tracker))
277 print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
279 print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash)
281 recent_blocks.append({ 'ts': share.timestamp, 'hash': '%x' % (share.header_hash) })
283 print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
285 @defer.inlineCallbacks
288 ip, port = x.split(':')
289 defer.returnValue(((yield reactor.resolve(ip)), int(port)))
291 defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
294 if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
296 addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
298 print >>sys.stderr, "error reading addrs"
299 for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
302 if addr not in addrs:
303 addrs[addr] = (0, time.time(), time.time())
307 connect_addrs = set()
308 for addr_df in map(parse, args.p2pool_nodes):
310 connect_addrs.add((yield addr_df))
315 best_share_hash_func=lambda: current_work.value['best_share_hash'],
316 port=args.p2pool_port,
319 connect_addrs=connect_addrs,
324 open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
325 task.LoopingCall(save_addrs).start(60)
327 # send share when the chain changes to their chain
328 def work_changed(new_work):
329 #print 'Work changed:', new_work
331 for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
332 if share.hash in shared_share_hashes:
334 shared_share_hashes.add(share.hash)
337 for peer in p2p_node.peers.itervalues():
338 peer.sendShares([share for share in shares if share.peer is not peer])
340 current_work.changed.watch(work_changed)
343 for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
345 if share.hash in tracker.verified.shares:
346 ss.add_verified_hash(share.hash)
347 task.LoopingCall(save_shares).start(60)
352 @defer.inlineCallbacks
356 is_lan, lan_ip = yield ipdiscover.get_local_ip()
358 pm = yield portmapper.get_port_mapper()
359 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
360 except defer.TimeoutError:
364 log.err(None, "UPnP error:")
365 yield deferral.sleep(random.expovariate(1/120))
370 # start listening for workers with a JSON-RPC server
372 print 'Listening for workers on port %i...' % (args.worker_port,)
374 if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
375 with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
376 vip_pass = f.read().strip('\r\n')
378 vip_pass = '%016x' % (random.randrange(2**64),)
379 with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
381 print ' Worker password:', vip_pass, '(only required for generating graphs)'
385 removed_unstales_var = variable.Variable((0, 0, 0))
386 @tracker.verified.removed.watch
388 if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
389 assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
390 removed_unstales_var.set((
391 removed_unstales_var.value[0] + 1,
392 removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
393 removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
396 removed_doa_unstales_var = variable.Variable(0)
@tracker.verified.removed.watch
def _(share):
    # Count our own DOA (dead-on-arrival) shares that age out of the verified
    # chain while still being ancestors of the current best share, so that
    # get_stale_counts() keeps accounting for them after removal.
    if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
        # BUG FIX: original called removed_doa_unstales.set(...), but no such
        # name exists -- the Variable is named removed_doa_unstales_var
        # (see its definition and its read in get_stale_counts), so every
        # removal raised NameError inside the watch callback.
        removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
def get_stale_counts():
    '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
    # Everything we mined in this session, stale or not.
    total_mine = len(my_share_hashes)
    total_mine_doa = len(my_doa_share_hashes)

    # Shares of ours still (or formerly) on the best chain; removed_* counters
    # are maintained by the tracker.verified.removed watchers above.
    delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
    removed_count, removed_orphan_announces, removed_dead_announces = removed_unstales_var.value

    mine_in_chain = delta.my_count + removed_count
    mine_doa_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
    orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_orphan_announces
    doas_recorded_in_chain = delta.my_dead_announce_count + removed_dead_announces

    # Anything we made that never landed in the chain is stale; DOAs are
    # split out, the remainder are orphans.
    mine_not_in_chain = total_mine - mine_in_chain
    mine_doa_not_in_chain = total_mine_doa - mine_doa_in_chain

    return (mine_not_in_chain - mine_doa_not_in_chain, mine_doa_not_in_chain), total_mine, (orphans_recorded_in_chain, doas_recorded_in_chain)
417 class WorkerBridge(worker_interface.WorkerBridge):
419 worker_interface.WorkerBridge.__init__(self)
420 self.new_work_event = current_work.changed
422 self.merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
423 self.recent_shares_ts_work = []
425 def _get_payout_script_from_username(self, user):
429 pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
432 return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
def preprocess_request(self, request):
    # Resolve the payout script from the worker's username; when the
    # username is unusable (None) or, randomly, in proportion to the
    # configured worker fee percentage, pay the node operator instead.
    script = self._get_payout_script_from_username(request.getUser())
    take_fee = script is None or random.uniform(0, 100) < args.worker_fee
    if take_fee:
        script = my_script
    return script,
440 def get_work(self, payout_script):
441 if len(p2p_node.peers) == 0 and net.PERSIST:
442 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
443 if current_work.value['best_share_hash'] is None and net.PERSIST:
444 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
445 if time.time() > current_work2.value['last_update'] + 60:
446 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
448 share_info, generate_tx = p2pool_data.generate_transaction(
451 previous_share_hash=current_work.value['best_share_hash'],
452 coinbase=(('' if current_work.value['aux_work'] is None else
453 '\xfa\xbemm' + pack.IntType(256, 'big').pack(current_work.value['aux_work']['hash']) + struct.pack('<ii', 1, 0)) + current_work.value['coinbaseflags'])[:100],
454 nonce=struct.pack('<Q', random.randrange(2**64)),
455 new_script=payout_script,
456 subsidy=current_work2.value['subsidy'],
457 donation=math.perfect_round(65535*args.donation_percentage/100),
458 stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
459 253 if orphans > orphans_recorded_in_chain else
460 254 if doas > doas_recorded_in_chain else
462 )(*get_stale_counts()),
464 block_target=current_work.value['bits'].target,
465 desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
469 target = 2**256//2**32 - 1
470 if len(self.recent_shares_ts_work) == 50:
471 hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
472 target = min(target, 2**256//(hash_rate * 5))
473 target = max(target, share_info['bits'].target)
474 if current_work.value['aux_work']:
475 target = max(target, current_work.value['aux_work']['target'])
477 transactions = [generate_tx] + list(current_work2.value['transactions'])
478 merkle_root = bitcoin_data.merkle_hash([bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in transactions])
479 self.merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time(), current_work.value['aux_work'], target
481 print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
482 bitcoin_data.target_to_difficulty(target),
483 bitcoin_data.target_to_difficulty(share_info['bits'].target),
484 (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - current_work2.value['subsidy']//200)*1e-8, net.PARENT.SYMBOL,
485 current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
486 len(current_work2.value['transactions']),
489 return bitcoin_getwork.BlockAttempt(
490 version=current_work.value['version'],
491 previous_block=current_work.value['previous_block'],
492 merkle_root=merkle_root,
493 timestamp=current_work2.value['time'],
494 bits=current_work.value['bits'],
498 def got_response(self, header, request):
499 # match up with transactions
500 if header['merkle_root'] not in self.merkle_root_to_transactions:
501 print >>sys.stderr, '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
503 share_info, transactions, getwork_time, aux_work, target = self.merkle_root_to_transactions[header['merkle_root']]
505 pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
506 on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
509 if pow_hash <= header['bits'].target or p2pool.DEBUG:
510 if factory.conn.value is not None:
511 factory.conn.value.send_block(block=dict(header=header, txs=transactions))
513 print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
514 if pow_hash <= header['bits'].target:
516 print 'GOT BLOCK FROM MINER! Passing to bitcoind! bitcoin: %x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),)
518 recent_blocks.append({ 'ts': time.time(), 'hash': '%x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),) })
520 log.err(None, 'Error while processing potential block:')
523 if aux_work is not None and (pow_hash <= aux_work['target'] or p2pool.DEBUG):
524 assert pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex') == transactions[0]['tx_ins'][0]['script'][4:4+32].encode('hex')
525 df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(merged_proxy.rpc_getauxblock)(
526 pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
527 bitcoin_data.aux_pow_type.pack(dict(
530 block_hash=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),
531 merkle_branch=bitcoin_data.calculate_merkle_branch([bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in transactions], 0),
536 parent_block_header=header,
541 if result != (pow_hash <= aux_work['target']):
542 print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
544 print 'Merged block submittal result: %s' % (result,)
547 log.err(err, 'Error submitting merged block:')
549 log.err(None, 'Error while processing merged mining POW:')
551 if pow_hash <= share_info['bits'].target:
552 share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
553 print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
555 p2pool_data.format_hash(share.hash),
556 p2pool_data.format_hash(share.previous_hash),
557 time.time() - getwork_time,
558 ' DEAD ON ARRIVAL' if not on_time else '',
560 my_share_hashes.add(share.hash)
562 my_doa_share_hashes.add(share.hash)
563 p2p_node.handle_shares([share], None)
565 if pow_hash <= header['bits'].target:
566 for peer in p2p_node.peers.itervalues():
567 peer.sendShares([share])
568 shared_share_hashes.add(share.hash)
570 log.err(None, 'Error forwarding block solution:')
572 if pow_hash <= target:
573 reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
574 if request.getPassword() == vip_pass:
575 reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
576 self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
577 while len(self.recent_shares_ts_work) > 50:
578 self.recent_shares_ts_work.pop(0)
580 if pow_hash > target:
581 print 'Worker submitted share with hash > target:'
582 print ' Hash: %56x' % (pow_hash,)
583 print ' Target: %56x' % (target,)
587 web_root = resource.Resource()
588 worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
591 if tracker.get_height(current_work.value['best_share_hash']) < 720:
592 return json.dumps(None)
593 return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
594 / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
597 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
598 weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
600 for script in sorted(weights, key=lambda s: weights[s]):
601 res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
602 return json.dumps(res)
def get_current_txouts():
    # Compute what every participant would currently be paid if a block were
    # found right now. Works by requesting work tagged with a random fake
    # payout script, then reading the generated coinbase outputs back out of
    # the merkle-root cache. NOTE(review): `wb` is presumably the WorkerBridge
    # instance created elsewhere in main -- confirm against full file.
    tmp_tag = str(random.randrange(2**64))
    outputs = wb.merkle_root_to_transactions[wb.get_work(tmp_tag).merkle_root][1][0]['tx_outs']
    total = sum(out['value'] for out in outputs)
    total_without_tag = sum(out['value'] for out in outputs if out['script'] != tmp_tag)
    total_diff = total - total_without_tag
    # Drop the tag output and redistribute its value proportionally across the
    # real outputs, rounding via math.perfect_round; zero-value outputs omitted.
    return dict((out['script'], out['value'] + math.perfect_round(out['value']*total_diff/total)) for out in outputs if out['script'] != tmp_tag and out['value'])
613 def get_current_scaled_txouts(scale, trunc=0):
614 txouts = get_current_txouts()
615 total = sum(txouts.itervalues())
616 results = dict((script, value*scale//total) for script, value in txouts.iteritems())
620 for s in sorted(results, key=results.__getitem__):
621 total_random += results[s]
623 if total_random >= trunc and results[s] >= trunc:
625 winner = math.weighted_choice((script, results[script]) for script in random_set)
626 for script in random_set:
628 results[winner] = total_random
629 if sum(results.itervalues()) < int(scale):
630 results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
def get_current_payouts():
    # JSON object mapping each human-readable payout address to its pending
    # payout expressed in whole coins (value is in base units, 1e8 per coin).
    payouts = {}
    for script, value in get_current_txouts().iteritems():
        payouts[bitcoin_data.script2_to_human(script, net.PARENT)] = value/1e8
    return json.dumps(payouts)
636 def get_patron_sendmany(this):
639 this, trunc = this.split('/', 1)
642 return json.dumps(dict(
643 (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
644 for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
645 if bitcoin_data.script2_to_address(script, net.PARENT) is not None
648 return json.dumps(None)
650 def get_global_stats():
651 # averaged over last hour
652 lookbehind = 3600//net.SHARE_PERIOD
653 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
656 nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
657 stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
658 return json.dumps(dict(
659 pool_nonstale_hash_rate=nonstale_hash_rate,
660 pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
661 pool_stale_prop=stale_prop,
664 def get_local_stats():
665 lookbehind = 3600//net.SHARE_PERIOD
666 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
669 global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
671 my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
672 my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
673 my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
674 my_share_count = my_unstale_count + my_orphan_count + my_doa_count
675 my_stale_count = my_orphan_count + my_doa_count
677 my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
679 my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
680 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
681 if share.hash in my_share_hashes)
682 actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
683 tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
684 share_att_s = my_work / actual_time
686 return json.dumps(dict(
687 my_hash_rates_in_last_hour=dict(
688 nonstale=share_att_s,
689 rewarded=share_att_s/(1 - global_stale_prop),
690 actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
692 my_share_counts_in_last_hour=dict(
693 shares=my_share_count,
694 unstale_shares=my_unstale_count,
695 stale_shares=my_stale_count,
696 orphan_stale_shares=my_orphan_count,
697 doa_stale_shares=my_doa_count,
699 my_stale_proportions_in_last_hour=dict(
701 orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
702 dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
def get_peer_addresses():
    # Space-separated "host" or "host:port" for each connected peer; the
    # port is only included when it differs from the network default.
    parts = []
    for peer in p2p_node.peers.itervalues():
        remote = peer.transport.getPeer()
        addr = remote.host
        if remote.port != net.P2P_PORT:
            addr += ':' + str(remote.port)
        parts.append(addr)
    return ' '.join(parts)
class WebInterface(resource.Resource):
    # Minimal twisted.web resource: serves func(*field values) as mime_type.
    def __init__(self, func, mime_type, *fields):
        # func: callable producing the response body (must return str/bytes)
        # mime_type: value for the Content-Type header
        # fields: GET query parameter names whose first values are passed
        #         positionally to func
        self.func, self.mime_type, self.fields = func, mime_type, fields

    def render_GET(self, request):
        request.setHeader('Content-Type', self.mime_type)
        # Allow cross-origin reads so external dashboards can poll the stats.
        request.setHeader('Access-Control-Allow-Origin', '*')
        # KeyError here (missing query parameter) propagates to twisted's
        # default error handling.
        return self.func(*(request.args[field][0] for field in self.fields))
718 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
719 web_root.putChild('users', WebInterface(get_users, 'application/json'))
720 web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
721 web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
722 web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
723 web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
724 web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
725 web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
726 web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
727 web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
729 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
731 grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
732 web_root.putChild('graphs', grapher.get_resource())
734 if tracker.get_height(current_work.value['best_share_hash']) < 720:
736 nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
737 poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
738 grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
739 task.LoopingCall(add_point).start(100)
743 reactor.listenTCP(args.worker_port, server.Site(web_root))
744 except error.CannotListenError, e:
745 if e.socketError.errno == 98:
746 print ' Worker port already in use. Retrying in 1 second...'
747 yield deferral.sleep(1)
757 @defer.inlineCallbacks
760 flag = factory.new_block.get_deferred()
762 yield set_real_work1()
765 yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
770 print 'Started successfully!'
774 if hasattr(signal, 'SIGALRM'):
775 signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
776 sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
778 signal.siginterrupt(signal.SIGALRM, False)
779 task.LoopingCall(signal.alarm, 30).start(1)
781 if args.irc_announce:
782 from twisted.words.protocols import irc
783 class IRCClient(irc.IRCClient):
785 def lineReceived(self, line):
787 irc.IRCClient.lineReceived(self, line)
789 irc.IRCClient.signedOn(self)
790 self.factory.resetDelay()
792 self.watch_id = tracker.verified.added.watch(self._new_share)
793 self.announced_hashes = set()
794 def _new_share(self, share):
795 if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes:
796 self.announced_hashes.add(share.header_hash)
797 self.say('#p2pool', '\x032,1BLOCK FOUND by %s! http://blockexplorer.com/block/%064x' % (bitcoin_data.script2_to_human(share.new_script, net.PARENT), share.header_hash))
798 def connectionLost(self, reason):
799 tracker.verified.added.unwatch(self.watch_id)
800 class IRCClientFactory(protocol.ReconnectingClientFactory):
802 reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
# ---------------------------------------------------------------------------
# Periodic status reporter. NOTE(review): this extract is missing the
# `def .../while True:/try:` scaffolding lines of the original loop; only
# the visible statements are reproduced and documented below.
# ---------------------------------------------------------------------------
@defer.inlineCallbacks
yield deferral.sleep(3)  # poll interval between status checks
# bitcoind liveness check: 'last_update' is stamped each time work is
# fetched; if it is more than 60s old, warn that bitcoind may be hung.
if time.time() > current_work2.value['last_update'] + 60:
    print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
# Only print pool statistics once a share chain actually exists.
if current_work.value['best_share_hash'] is not None:
    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
    # Pool attempt rate and cumulative payout weights over (at most) the
    # last 720 shares of the chain.
    att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
    weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
    (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
    stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
    # Scale the measured rate up by the observed stale proportion to
    # estimate the real hash rate, then take this node's weighted slice.
    real_att_s = att_s / (1 - stale_prop)
    my_att_s = real_att_s*weights.get(my_script, 0)/total_weight
    this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i (%i incoming)' % (
        math.format(int(real_att_s)),
        len(tracker.verified.shares),
        weights.get(my_script, 0)/total_weight*100,
        math.format(int(my_att_s)),
        sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
    # Expected solo-block interval at current network difficulty and rate.
    this_str += '\nAverage time between blocks: %.2f days' % (
        2**256 / current_work.value['bits'].target / real_att_s / (60 * 60 * 24),
    this_str += '\nPool stales: %i%% Own: %s Own efficiency: %s' % (
        int(100*stale_prop+.5),
        # Binomial confidence intervals for own stale rate and efficiency.
        math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
        math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
    # Rate-limit output: only print when the line changes or every 15s.
    if this_str != last_str or time.time() > last_time + 15:
        last_time = time.time()
log.err(None, 'Fatal error:')
class FixedArgumentParser(argparse.ArgumentParser):
    """ArgumentParser whose ``@file`` arguments may contain several
    whitespace-separated arguments per line.

    Stock argparse treats each line of a fromfile as exactly one argument;
    this subclass overrides ``convert_arg_line_to_args`` to split lines on
    whitespace, and reimplements ``_read_args_from_files`` accordingly.
    """
    
    def _read_args_from_files(self, arg_strings):
        """Expand every argument of the form '<prefix><filename>' into the
        arguments read from that file (recursively, so a fromfile may itself
        reference further fromfiles) and return the new argument list.
        
        Calls self.error() — which prints a message and exits — if a
        referenced file cannot be opened.
        """
        new_arg_strings = []
        for arg_string in arg_strings:
            # for regular arguments, just add them back into the list
            if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                new_arg_strings.append(arg_string)
            # replace arguments referencing files with the file content
            else:
                try:
                    # context manager guarantees the file is closed even on
                    # error (the original left the handle open)
                    with open(arg_string[1:]) as args_file:
                        arg_strings = []
                        for arg_line in args_file.read().splitlines():
                            for arg in self.convert_arg_line_to_args(arg_line):
                                arg_strings.append(arg)
                    # recurse so nested fromfile references are expanded too
                    arg_strings = self._read_args_from_files(arg_strings)
                    new_arg_strings.extend(arg_strings)
                except IOError:
                    err = sys.exc_info()[1]
                    self.error(str(err))
        # return the modified argument list
        return new_arg_strings
    
    def convert_arg_line_to_args(self, arg_line):
        """Split one fromfile line into whitespace-separated arguments,
        dropping empty tokens."""
        return [arg for arg in arg_line.split() if arg.strip()]
# Command-line interface.  fromfile_prefix_chars='@' lets users keep their
# options in a file and invoke p2pool as `p2pool @my.conf` (handled by
# FixedArgumentParser, which allows several arguments per line).
parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
parser.add_argument('--version', action='version', version=p2pool.__version__)
parser.add_argument('--net',
    help='use specified network (default: bitcoin)',
    action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
parser.add_argument('--testnet',
    help='''use the network's testnet''',
    action='store_const', const=True, default=False, dest='testnet')
parser.add_argument('--debug',
    help='enable debugging mode',
    action='store_const', const=True, default=False, dest='debug')
parser.add_argument('-a', '--address',
    help='generate payouts to this address (default: <address requested from bitcoind>)',
    type=str, action='store', default=None, dest='address')
parser.add_argument('--logfile',
    help='''log to this file (default: data/<NET>/log)''',
    type=str, action='store', default=None, dest='logfile')
parser.add_argument('--merged-url',
    help='call getauxblock on this url to get work for merged mining (example: http://127.0.0.1:10332/)',
    type=str, action='store', default=None, dest='merged_url')
parser.add_argument('--merged-userpass',
    help='use this user and password when requesting merged mining work (example: ncuser:ncpass)',
    type=str, action='store', default=None, dest='merged_userpass')
parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
    help='donate this percentage of work to author of p2pool (default: 0.5)',
    type=float, action='store', default=0.5, dest='donation_percentage')
parser.add_argument('--irc-announce',
    help='announce any blocks found on irc://irc.freenode.net/#p2pool',
    action='store_true', default=False, dest='irc_announce')

# --- p2pool peer-to-peer network options ---
p2pool_group = parser.add_argument_group('p2pool interface')
p2pool_group.add_argument('--p2pool-port', metavar='PORT',
    help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='p2pool_port')
p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
    help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
    type=str, action='append', default=[], dest='p2pool_nodes')
parser.add_argument('--disable-upnp',
    help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
    action='store_false', default=True, dest='upnp')

# --- miner-facing worker (RPC) interface options ---
worker_group = parser.add_argument_group('worker interface')
worker_group.add_argument('-w', '--worker-port', metavar='PORT',
    help='listen on PORT for RPC connections from miners (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='worker_port')
worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
    help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
    type=float, action='store', default=0, dest='worker_fee')

# --- upstream bitcoind connection options ---
bitcoind_group = parser.add_argument_group('bitcoind interface')
bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
    help='connect to this address (default: 127.0.0.1)',
    type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
    help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (n.NAME, n.PARENT.RPC_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='bitcoind_rpc_port')
bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
    help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (n.NAME, n.PARENT.P2P_PORT) for _, n in sorted(networks.realnets.items())),
    type=int, action='store', default=None, dest='bitcoind_p2p_port')
# Positional: zero, one, or two values — username and/or password for
# bitcoind's RPC interface (validated and unpacked below).
bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
    help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
    type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
# ---------------------------------------------------------------------------
# Post-processing of parsed arguments: resolve network, data directory,
# bitcoind credentials (optionally from bitcoin.conf), port defaults, and
# payout address.
# NOTE(review): a few structural lines (a `try:`, an `else:`, and a list
# closer) are missing from this extract; the statements are kept as-is.
# ---------------------------------------------------------------------------
args = parser.parse_args()

# Resolve the concrete network object, e.g. 'bitcoin' or 'bitcoin_testnet'.
net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]

# Per-network data directory, created next to the executable if absent.
datadir_path = os.path.join(os.path.dirname(sys.argv[0]), 'data', net.NAME)
if not os.path.exists(datadir_path):
    os.makedirs(datadir_path)

# BITCOIND_RPCUSERPASS: [] -> (None, None); [pass] -> (None, pass);
# [user, pass] -> (user, pass).  More than two is an error.
if len(args.bitcoind_rpc_userpass) > 2:
    parser.error('a maximum of two arguments are allowed')
args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]

if args.bitcoind_rpc_password is None:
    # No password given: fall back to the coin's own configuration file.
    if not hasattr(net, 'CONF_FILE_FUNC'):
        parser.error('This network has no configuration file function. Manually enter your RPC password.')
    conf_path = net.CONF_FILE_FUNC()
    if not os.path.exists(conf_path):
        parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
            '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
            '''server=true\r\n'''
            '''rpcpassword=%x # (randomly generated for your convenience)''' % (conf_path, random.randrange(2**128)))
    with open(conf_path, 'rb') as f:
        cp = ConfigParser.RawConfigParser()
        # bitcoin.conf has no section header; prepend a dummy '[x]' so
        # ConfigParser accepts it.
        cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
        # Only fill in values the user did not supply on the command line.
        for conf_name, var_name, var_type in [
            ('rpcuser', 'bitcoind_rpc_username', str),
            ('rpcpassword', 'bitcoind_rpc_password', str),
            ('rpcport', 'bitcoind_rpc_port', int),
            ('port', 'bitcoind_p2p_port', int),
            if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                setattr(args, var_name, var_type(cp.get('x', conf_name)))

if args.bitcoind_rpc_username is None:
    args.bitcoind_rpc_username = ''

# Any port still unset falls back to the network's standard value.
if args.bitcoind_rpc_port is None:
    args.bitcoind_rpc_port = net.PARENT.RPC_PORT

if args.bitcoind_p2p_port is None:
    args.bitcoind_p2p_port = net.PARENT.P2P_PORT

if args.p2pool_port is None:
    args.p2pool_port = net.P2P_PORT

if args.worker_port is None:
    args.worker_port = net.WORKER_PORT

# Validate the payout address now so typos are rejected at startup.
# NOTE(review): the enclosing `try:` line is missing from this extract.
if args.address is not None:
    args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
    except Exception, e:
        parser.error('error parsing address: ' + repr(e))
    args.pubkey_hash = None

# Merged mining requires both the url and the credentials, or neither.
if (args.merged_url is None) ^ (args.merged_userpass is None):
    parser.error('must specify --merged-url and --merged-userpass')
# Default log location lives in the per-network data directory.
if args.logfile is None:
    args.logfile = os.path.join(datadir_path, 'log')

# Tee everything written to stdout/stderr into the logfile; stderr output
# is additionally prefixed with '> ' so it can be told apart in the log.
logfile = logging.LogFile(args.logfile)
pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
sys.stdout = logging.AbortPipe(pipe)
sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
# SIGUSR1 handler for external log rotation (only where the platform has
# the signal, i.e. not on Windows).
if hasattr(signal, "SIGUSR1"):
    def sigusr1(signum, frame):
        print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
        # NOTE(review): the actual logfile.reopen() call between these two
        # prints is missing from this extract — confirm against the full file.
        print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
    signal.signal(signal.SIGUSR1, sigusr1)
# Also reopen the logfile every 5 seconds as a fallback rotation aid.
task.LoopingCall(logfile.reopen).start(5)

# Defer the real startup until the reactor is running.
reactor.callWhenRunning(main, args, net, datadir_path)