3 from __future__ import division
14 from twisted.internet import defer, reactor
15 from twisted.web import server
16 from twisted.python import log
18 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
19 from util import db, expiring_dict, jsonrpc, variable, deferral, math, skiplist
20 from . import p2p, worker_interface
21 import p2pool.data as p2pool
22 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current work template and block height from bitcoind.

    Issues getwork and getblocknumber over JSON-RPC in parallel and fires
    the returned Deferred with a (bitcoin.getwork.BlockAttempt, height)
    tuple. Retried up to 3 times by the decorator on failure.
    """
    # a block could arrive in between these two queries
    getwork_df, height_df = bitcoind.rpc_getwork(), bitcoind.rpc_getblocknumber()
    try:
        getwork, height = bitcoin.getwork.BlockAttempt.from_getwork((yield getwork_df)), (yield height_df)
    finally:
        # get rid of residual errors. Previously this ran only on the success
        # path, so if the first yield raised, height_df's failure was left
        # unhandled (logged as "Unhandled error in Deferred"). Doing it in a
        # finally block swallows the residual error on both paths.
        getwork_df.addErrback(lambda fail: None)
        height_df.addErrback(lambda fail: None)
    defer.returnValue((getwork, height))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind, via a p2p 'checkorder' message, for a script that
    payouts can be sent to (the IP-transaction mechanism).

    NOTE(review): this view of the file appears to be missing lines in this
    function - the body of the 'denied' branch and the final
    defer.returnValue are not visible. Confirm the control flow below
    against the full source before relying on it.
    """
    res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
    if res['reply'] == 'success':
        my_script = res['script']
    elif res['reply'] == 'denied':
        raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build a payout script the ordinary way: request an address from
    bitcoind's 'p2pool' account over RPC and convert it to a
    pay-to-pubkey-hash script."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
53 @defer.inlineCallbacks
56 print 'p2pool (version %s)' % (p2pool_init.__version__,)
59 # connect to bitcoind over JSON-RPC and do initial getwork
60 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
61 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
62 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
63 temp_work, temp_height = yield getwork(bitcoind)
65 print ' Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
68 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
69 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
70 factory = bitcoin.p2p.ClientFactory(args.net)
71 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
72 my_script = yield get_payout_script(factory)
73 if args.pubkey_hash is None:
75 print 'IP transaction denied ... falling back to sending to address.'
76 my_script = yield get_payout_script2(bitcoind, args.net)
78 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
80 print ' Payout script:', my_script.encode('hex')
    # Tracks bitcoin block heights via the p2p connection (used by
    # tracker.think below).
    ht = bitcoin.p2p.HeightTracker(factory)

    # Share-chain tracker for this network, plus a cache of Chain objects
    # that expires entries after 300 seconds.
    tracker = p2pool.OkayTracker(args.net)
    chains = expiring_dict.ExpiringDict(300)
87 def get_chain(chain_id_data):
88 return chains.setdefault(chain_id_data, Chain(chain_id_data))
    peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it

    # information affecting work that should trigger a long-polling update
    current_work = variable.Variable(None)
    # information affecting work that should not trigger a long-polling update
    current_work2 = variable.Variable(None)

    # Events fired when new work / new share-chain state arrives; the
    # polling loops below wait on these to wake up early.
    work_updated = variable.Event()
    tracker_updated = variable.Event()

    # share_hash -> time.time() of the last getshares request for it, so we
    # don't re-request the same parent share too often.
    requested = expiring_dict.ExpiringDict(300)
102 @defer.inlineCallbacks
103 def set_real_work1():
104 work, height = yield getwork(bitcoind)
105 # XXX call tracker_updated
106 current_work.set(dict(
107 version=work.version,
108 previous_block=work.previous_block,
111 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
113 current_work2.set(dict(
114 clock_offset=time.time() - work.timestamp,
117 @defer.inlineCallbacks
118 def set_real_work2():
119 best, desired = yield tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
121 t = dict(current_work.value)
122 t['best_share_hash'] = best
125 for peer2, share_hash in desired:
126 last_request_time = requested.get(share_hash, None)
127 if last_request_time is not None and last_request_time - 5 < time.time() < last_request_time + 10:
129 potential_peers = set()
130 for head in tracker.tails[share_hash]:
131 potential_peers.update(peer_heads.get(head, set()))
132 potential_peers = [peer for peer in potential_peers if peer.connected2]
133 peer = random.choice(potential_peers) if potential_peers and random.random() > .5 else peer2
137 print 'Requesting parent share %x from %s' % (share_hash % 2**32, '%s:%i' % peer.addr)
141 stops=list(set(tracker.heads) | set(
142 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
145 requested[share_hash] = time.time()
147 print 'Initializing work...'
148 yield set_real_work1()
149 yield set_real_work2()
152 start_time = time.time() - current_work2.value['clock_offset']
154 # setup p2p logic and join p2pool network
156 def share_share(share, ignore_peer=None):
157 for peer in p2p_node.peers.itervalues():
158 if peer is ignore_peer:
160 peer.send_shares([share])
163 def p2p_shares(shares, peer=None):
165 print "Processing %i shares..." % (len(shares),)
169 if share.hash in tracker.shares:
170 #print 'Got duplicate share, ignoring. Hash: %x' % (share.hash % 2**32,)
174 #print 'Received share %x from %r' % (share.hash % 2**32, share.peer.addr if share.peer is not None else None)
177 #for peer2, share_hash in desired:
178 # print 'Requesting parent share %x' % (share_hash,)
179 # peer2.send_getshares(hashes=[share_hash], parents=2000)
181 if share.bitcoin_hash <= share.header['target']:
183 print 'GOT BLOCK! Passing to bitcoind! %x bitcoin: %x' % (share.hash % 2**32, share.bitcoin_hash,)
185 if factory.conn.value is not None:
186 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
188 print 'No bitcoind connection! Erp!'
190 if shares and peer is not None:
191 peer_heads.setdefault(shares[0].hash, set()).add(peer)
194 tracker_updated.happened()
197 print "... done processing %i shares." % (len(shares),)
199 def p2p_share_hashes(share_hashes, peer):
201 for share_hash in share_hashes:
202 if share_hash in tracker.shares:
203 pass # print 'Got share hash, already have, ignoring. Hash: %x' % (share_hash % 2**32,)
205 print 'Got share hash, requesting! Hash: %x' % (share_hash % 2**32,)
206 get_hashes.append(share_hash)
208 if share_hashes and peer is not None:
209 peer_heads.setdefault(share_hashes[0], set()).add(peer)
211 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
213 def p2p_get_shares(share_hashes, parents, stops, peer):
214 parents = min(parents, 1000//len(share_hashes))
217 for share_hash in share_hashes:
218 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
219 if share.hash in stops:
222 peer.send_shares(shares, full=True)
224 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
228 ip, port = x.split(':')
231 return x, args.net.P2P_PORT
234 ('72.14.191.28', args.net.P2P_PORT),
235 ('62.204.197.159', args.net.P2P_PORT),
238 nodes.append(((yield reactor.resolve('p2pool.forre.st')), args.net.P2P_PORT))
241 print 'Error resolving bootstrap node IP:'
246 current_work=current_work,
247 port=args.p2pool_port,
249 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
250 mode=0 if args.low_bandwidth else 1,
251 preferred_addrs=map(parse, args.p2pool_nodes) + nodes,
253 p2p_node.handle_shares = p2p_shares
254 p2p_node.handle_share_hashes = p2p_share_hashes
255 p2p_node.handle_get_shares = p2p_get_shares
259 # send share when the chain changes to their chain
260 def work_changed(new_work):
261 #print 'Work changed:', new_work
262 for share in tracker.get_chain_known(new_work['best_share_hash']):
265 share_share(share, share.peer)
266 current_work.changed.watch(work_changed)
271 # start listening for workers with a JSON-RPC server
273 print 'Listening for workers on port %i...' % (args.worker_port,)
277 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
278 run_identifier = struct.pack('<Q', random.randrange(2**64))
280 def compute(state, all_targets):
281 pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
282 pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
285 for tx in pre_extra_txs:
286 this_size = len(bitcoin.data.tx_type.pack(tx.tx))
287 if size + this_size > 500000:
292 # XXX assuming generate_tx is smallish here..
293 generate_tx = p2pool.generate_transaction(
295 previous_share_hash=state['best_share_hash'],
296 new_script=my_script,
297 subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
298 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
299 block_target=state['target'],
302 print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
303 #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
304 #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
305 transactions = [generate_tx] + [tx.tx for tx in extra_txs]
306 merkle_root = bitcoin.data.merkle_hash(transactions)
307 merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
309 timestamp = int(time.time() - current_work2.value['clock_offset'])
310 if state['best_share_hash'] is not None:
311 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
312 if timestamp2 > timestamp:
313 print 'Toff', timestamp2 - timestamp
314 timestamp = timestamp2
315 target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
317 target2 = min(2**256//2**32 - 1, target2)
318 times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
319 #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
320 return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
325 def got_response(data):
327 # match up with transactions
328 header = bitcoin.getwork.decode_data(data)
329 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
330 if transactions is None:
331 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
333 block = dict(header=header, txs=transactions)
334 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
335 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
337 print 'GOT BLOCK! Passing to bitcoind! %x' % (hash_,)
339 if factory.conn.value is not None:
340 factory.conn.value.send_block(block=block)
342 print 'No bitcoind connection! Erp!'
343 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
345 print 'Received invalid share from worker - %x/%x' % (hash_, target)
347 share = p2pool.Share.from_block(block)
348 my_shares.add(share.hash)
349 print 'GOT SHARE! %x prev %x' % (share.hash % 2**32, 0 if share.previous_hash is None else share.previous_hash % 2**32), "DEAD ON ARRIVAL" if share.previous_hash != current_work.value['best_share_hash'] else "", time.time() - times[share.nonce], "s since getwork"
350 good = share.previous_hash == current_work.value['best_share_hash']
351 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
353 # eg. good = share.hash == current_work.value['best_share_hash'] here
357 print 'Error processing data received from worker:'
363 if current_work.value['best_share_hash'] is not None:
364 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
365 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
368 reactor.listenTCP(args.worker_port, server.Site(worker_interface.WorkerInterface(current_work, compute, got_response, get_rate)))
375 tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
376 get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
379 def __init__(self, tx, seen_at_block):
380 self.hash = bitcoin.data.tx_type.hash256(tx)
382 self.seen_at_block = seen_at_block
383 self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
385 #print '%x %r' % (seen_at_block, tx)
386 #for mention in self.mentions:
387 # print '%x' % mention
389 self.parents_all_in_blocks = False
392 self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
393 self._find_parents_in_blocks()
395 @defer.inlineCallbacks
396 def _find_parents_in_blocks(self):
397 for tx_in in self.tx['tx_ins']:
399 raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
402 self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
403 #print raw_transaction
404 if not raw_transaction['parent_blocks']:
406 self.parents_all_in_blocks = True
409 if not self.parents_all_in_blocks:
415 @defer.inlineCallbacks
418 assert isinstance(tx_hash, (int, long))
419 #print "REQUESTING", tx_hash
420 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
422 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
425 print 'Error handling tx:'
428 # disable for now, for testing impact on stales
429 #factory.new_tx.watch(new_tx)
    def new_block(block_hash):
        # bitcoind announced a new block over p2p: fire the work_updated
        # event so the getwork polling loop refreshes immediately instead of
        # waiting for its next timer tick.
        work_updated.happened()
    factory.new_block.watch(new_block)

    print 'Started successfully!'
438 @defer.inlineCallbacks
441 flag = work_updated.get_deferred()
443 yield set_real_work1()
446 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/1))], fireOnOneCallback=True)
448 @defer.inlineCallbacks
451 flag = tracker_updated.get_deferred()
453 yield set_real_work2()
456 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/1))], fireOnOneCallback=True)
461 counter = skiplist.CountsSkipList(tracker, my_script, run_identifier)
464 yield deferral.sleep(random.expovariate(1/1))
466 if current_work.value['best_share_hash'] is not None:
467 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
469 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
470 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
471 count = counter(current_work.value['best_share_hash'], height, 2**100)
472 print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale)' % (
475 weights.get(my_script, 0)/total_weight*100,
476 math.format(weights.get(my_script, 0)/total_weight*att_s),
478 len(my_shares) - count,
480 #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
481 #for k, v in weights.iteritems():
482 # print k.encode('hex'), v/total_weight
    # Command-line interface. Argument registration order and all help
    # strings are user-visible; keep them stable.
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')

    # p2pool's own peer-to-peer network options.
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')

    # Miner-facing JSON-RPC (getwork) interface.
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')

    # Upstream bitcoind connection (RPC for getwork, p2p for blocks/payouts).
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')

    # Positional RPC credentials.
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
539 args = parser.parse_args()
542 p2pool_init.DEBUG = True
543 class TimestampingPipe(object):
544 def __init__(self, inner_file):
545 self.inner_file = inner_file
547 def write(self, data):
548 buf = self.buf + data
549 lines = buf.split('\n')
550 for line in lines[:-1]:
551 self.inner_file.write("%s %s\n" % (time.strftime("%H:%M:%S"), line))
553 sys.stdout = TimestampingPipe(sys.stdout)
554 sys.stderr = TimestampingPipe(sys.stderr)
556 if args.bitcoind_p2p_port is None:
557 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
559 if args.p2pool_port is None:
560 args.p2pool_port = args.net.P2P_PORT
562 if args.address is not None:
564 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
566 raise ValueError("error parsing address: " + repr(e))
568 args.pubkey_hash = None
570 reactor.callWhenRunning(main, args)