3 from __future__ import division
14 from twisted.internet import defer, reactor
15 from twisted.web import server
16 from twisted.python import log
18 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
19 from util import db, expiring_dict, jsonrpc, variable, deferral, math, skiplist
20 from . import p2p, worker_interface
21 import p2pool.data as p2pool
22 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current block attempt and chain height from bitcoind.

    Fires getwork and getblocknumber before yielding so the two RPCs run
    concurrently; retried up to 3 times by the deferral.retry decorator.
    Returns (BlockAttempt, height).
    """
    # a block could arrive in between these two queries
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
    height = yield height_df
    # get rid of residual errors so twisted does not log unhandled failures
    work_df.addErrback(lambda fail: None)
    height_df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind, via a p2p checkorder message, for a payout script.

    Returns the script on 'success', None on 'denied' (the caller at the
    startup sequence then falls back to paying an account address), and
    raises ValueError for any other reply.
    """
    res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
    if res['reply'] == 'success':
        my_script = res['script']
    elif res['reply'] == 'denied':
        # bitcoind refused the IP transaction; signal the fallback path.
        # (This branch body and the final returnValue were lost in the paste.)
        my_script = None
    else:
        raise ValueError('Unexpected reply: %r' % (res,))
    defer.returnValue(my_script)
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fall back to paying out to bitcoind's 'p2pool' account address."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
# NOTE(review): this region is the numbered remains of the coroutine main(args);
# the 'def main' header, indentation, and several lines were lost in the paste.
# The leading numbers are the original file's line numbers.
53 @defer.inlineCallbacks
56 print 'p2pool (version %s)' % (p2pool_init.__version__,)
# Connect to bitcoind over JSON-RPC and perform an initial getwork as a sanity check.
59 # connect to bitcoind over JSON-RPC and do initial getwork
60 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
61 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
62 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
63 temp_work, temp_height = yield getwork(bitcoind)
65 print ' Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
# Connect over the bitcoin p2p protocol; used for checkorder here and for
# submitting found blocks later.
68 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
69 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
70 factory = bitcoin.p2p.ClientFactory(args.net)
71 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
72 my_script = yield get_payout_script(factory)
73 if args.pubkey_hash is None:
# NOTE(review): a guard (presumably 'if my_script is None:') was lost before
# the fallback below -- confirm against the original source.
75 print 'IP transaction denied ... falling back to sending to address.'
76 my_script = yield get_payout_script2(bitcoind, args.net)
78 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
80 print ' Payout script:', my_script.encode('hex')
# Share-chain bookkeeping: HeightTracker follows the bitcoin chain height;
# OkayTracker holds the p2pool share chain.
83 ht = bitcoin.p2p.HeightTracker(factory)
85 tracker = p2pool.OkayTracker(args.net)
86 chains = expiring_dict.ExpiringDict(300)
87 def get_chain(chain_id_data):
88 return chains.setdefault(chain_id_data, Chain(chain_id_data))
# NOTE(review): 'Chain' is not defined anywhere visible in this paste -- verify.
90 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
92 # information affecting work that should trigger a long-polling update
93 current_work = variable.Variable(None)
94 # information affecting work that should not trigger a long-polling update
95 current_work2 = variable.Variable(None)
97 work_updated = variable.Event()
98 tracker_updated = variable.Event()
# share_hash -> (time last requested, request count); entries expire after 300s
100 requested = expiring_dict.ExpiringDict(300)
# Poll bitcoind for new work (getwork + height) and publish it into
# current_work / current_work2.
102 @defer.inlineCallbacks
103 def set_real_work1():
104 work, height = yield getwork(bitcoind)
105 # XXX call tracker_updated
106 current_work.set(dict(
107 version=work.version,
108 previous_block=work.previous_block,
# NOTE(review): original lines 109-110 (further dict fields, likely
# target/height) were lost in the paste.
111 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
# clock_offset lets later code convert local time to bitcoind's clock
113 current_work2.set(dict(
114 clock_offset=time.time() - work.timestamp,
# Re-evaluate the best share via the tracker and request missing parent
# shares from peers, with exponential backoff per hash.
117 @defer.inlineCallbacks
118 def set_real_work2():
119 best, desired = yield tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
121 t = dict(current_work.value)
122 t['best_share_hash'] = best
# 'desired' lists (peer, share_hash) pairs the tracker wants fetched
125 for peer2, share_hash in desired:
126 last_request_time, count = requested.get(share_hash, (None, 0))
# backoff: skip if re-requested within 10 * 1.5**count seconds of the last try
127 if last_request_time is not None and last_request_time - 5 < time.time() < last_request_time + 10 * 1.5**count:
129 potential_peers = set()
130 for head in tracker.tails[share_hash]:
131 potential_peers.update(peer_heads.get(head, set()))
132 potential_peers = [peer for peer in potential_peers if peer.connected2]
133 if count == 0 and peer2 is not None and peer2.connected2:
# mostly pick a random knowledgeable peer, sometimes the announcing one
136 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
140 print 'Requesting parent share %x from %s' % (share_hash % 2**32, '%s:%i' % peer.addr)
# stop the download at known heads (or a point up to 10 shares below them)
144 stops=list(set(tracker.heads) | set(
145 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
148 requested[share_hash] = time.time(), count + 1
# One synchronous round of both work updaters before going live.
150 print 'Initializing work...'
151 yield set_real_work1()
152 yield set_real_work2()
# start_time is expressed in bitcoind's clock via the measured offset
155 start_time = time.time() - current_work2.value['clock_offset']
157 # setup p2p logic and join p2pool network
def share_share(share, ignore_peer=None):
    """Forward a share to every connected peer except ignore_peer.

    ignore_peer is normally the peer the share arrived from, so it is not
    echoed back to its sender.
    """
    for peer in p2p_node.peers.itervalues():
        if peer is ignore_peer:
            # restored guard: the 'continue' separating the sender check from
            # the send was lost in the paste (as pasted, the send sat under
            # the 'if', inverting the intent)
            continue
        peer.send_shares([share])
# Handler for batches of shares arriving from the p2pool network.
166 def p2p_shares(shares, peer=None):
168 print "Processing %i shares..." % (len(shares),)
# NOTE(review): the 'for share in shares:' loop header was lost in the paste.
172 if share.hash in tracker.shares:
173 #print 'Got duplicate share, ignoring. Hash: %x' % (share.hash % 2**32,)
177 #print 'Received share %x from %r' % (share.hash % 2**32, share.peer.addr if share.peer is not None else None)
180 #for peer2, share_hash in desired:
181 # print 'Requesting parent share %x' % (share_hash,)
182 # peer2.send_getshares(hashes=[share_hash], parents=2000)
# A share that also meets the bitcoin target is a full block: submit it.
184 if share.bitcoin_hash <= share.header['target']:
186 print 'GOT BLOCK! Passing to bitcoind! %x bitcoin: %x' % (share.hash % 2**32, share.bitcoin_hash,)
188 if factory.conn.value is not None:
189 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
191 print 'No bitcoind connection! Erp!'
# remember which peer told us about this chain head
193 if shares and peer is not None:
194 peer_heads.setdefault(shares[0].hash, set()).add(peer)
197 tracker_updated.happened()
200 print "... done processing %i shares." % (len(shares),)
202 def p2p_share_hashes(share_hashes, peer):
204 for share_hash in share_hashes:
205 if share_hash in tracker.shares:
206 pass # print 'Got share hash, already have, ignoring. Hash: %x' % (share_hash % 2**32,)
208 print 'Got share hash, requesting! Hash: %x' % (share_hash % 2**32,)
209 get_hashes.append(share_hash)
211 if share_hashes and peer is not None:
212 peer_heads.setdefault(share_hashes[0], set()).add(peer)
214 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
def p2p_get_shares(share_hashes, parents, stops, peer):
    """Serve a peer's getshares request.

    Walks each requested chain up to 'parents' ancestors (capped so the whole
    response stays around 1000 shares), stopping at any hash in 'stops', and
    sends the collected shares back. (The 'stops'/'shares' initialization and
    the inner loop body were lost in the paste and have been restored from the
    visible usage.)
    """
    parents = min(parents, 1000//len(share_hashes))
    stops = set(stops)
    shares = []
    for share_hash in share_hashes:
        # islice: the requested share itself plus up to 'parents' ancestors
        for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
            if share.hash in stops:
                break
            shares.append(share)
    peer.send_shares(shares, full=True)
# Join the p2pool network: parse -n arguments, seed with builtin/bootstrap
# addresses, then construct the p2p Node and hook up its message handlers.
227 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
# NOTE(review): the 'def parse(x):' helper header and its branches were lost
# in the paste; it maps 'ADDR[:PORT]' strings to (ip, port) tuples.
231 ip, port = x.split(':')
234 return x, args.net.P2P_PORT
# builtin seed node addresses
237 ('72.14.191.28', args.net.P2P_PORT),
238 ('62.204.197.159', args.net.P2P_PORT),
# best-effort DNS bootstrap; resolution failure is logged, not fatal
241 nodes.append(((yield reactor.resolve('p2pool.forre.st')), args.net.P2P_PORT))
244 print 'Error resolving bootstrap node IP:'
# keyword arguments to the p2p Node constructor (its header was lost)
249 current_work=current_work,
250 port=args.p2pool_port,
# peer address book persisted in a SQLite file next to the script
252 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
253 mode=0 if args.low_bandwidth else 1,
254 preferred_addrs=map(parse, args.p2pool_nodes) + nodes,
# wire incoming p2p messages to the handlers defined above
256 p2p_node.handle_shares = p2p_shares
257 p2p_node.handle_share_hashes = p2p_share_hashes
258 p2p_node.handle_get_shares = p2p_get_shares
# When the best share changes, push not-yet-shared shares of the new chain
# out to peers.
262 # send share when the chain changes to their chain
263 def work_changed(new_work):
264 #print 'Work changed:', new_work
265 for share in tracker.get_chain_known(new_work['best_share_hash']):
# NOTE(review): a termination guard (stop once an already-shared share is
# reached) appears to have been lost between these lines -- as pasted, this
# would walk the whole known chain on every change; confirm.
268 share_share(share, share.peer)
269 current_work.changed.watch(work_changed)
274 # start listening for workers with a JSON-RPC server
276 print 'Listening for workers on port %i...' % (args.worker_port,)
# maps merkle roots handed to miners back to the transaction lists needed to
# reassemble a full block when work is returned
280 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
# random per-run tag embedded in generated coinbase nonces
281 run_identifier = struct.pack('<Q', random.randrange(2**64))
# Build a getwork BlockAttempt for a miner: select mempool transactions,
# create the p2pool generate (coinbase) transaction, and record the merkle
# root so returned work can be matched back to its transactions.
283 def compute(state, all_targets):
284 pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
285 pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
# greedily fill the block up to the 500000-byte size cap
288 for tx in pre_extra_txs:
289 this_size = len(bitcoin.data.tx_type.pack(tx.tx))
290 if size + this_size > 500000:
295 # XXX assuming generate_tx is smallish here..
296 generate_tx = p2pool.generate_transaction(
298 previous_share_hash=state['best_share_hash'],
299 new_script=my_script,
# block subsidy (halving every 210000 blocks) plus collected tx fees
300 subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
301 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
302 block_target=state['target'],
305 print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
306 #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
307 #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
308 transactions = [generate_tx] + [tx.tx for tx in extra_txs]
309 merkle_root = bitcoin.data.merkle_hash(transactions)
310 merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
# never let the share timestamp fall below the median of the last 11 shares
312 timestamp = int(time.time() - current_work2.value['clock_offset'])
313 if state['best_share_hash'] is not None:
314 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
315 if timestamp2 > timestamp:
316 print 'Toff', timestamp2 - timestamp
317 timestamp = timestamp2
# share target from the coinbase, clamped to at most bitcoin difficulty 1
318 target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
320 target2 = min(2**256//2**32 - 1, target2)
# record issue time per nonce to report getwork->share latency later
321 times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
322 #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
323 return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
# Handle solved work returned by a miner: rebuild the block from the stored
# transactions, submit it to bitcoind if it meets the bitcoin target, and
# record it as a p2pool share.
328 def got_response(data):
330 # match up with transactions
331 header = bitcoin.getwork.decode_data(data)
332 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
333 if transactions is None:
334 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
336 block = dict(header=header, txs=transactions)
337 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
# DEBUG mode submits everything, useful on testnet
338 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
340 print 'GOT BLOCK! Passing to bitcoind! %x' % (hash_,)
342 if factory.conn.value is not None:
343 factory.conn.value.send_block(block=block)
345 print 'No bitcoind connection! Erp!'
# the share target lives in the coinbase (first) transaction's input script
346 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
348 print 'Received invalid share from worker - %x/%x' % (hash_, target)
350 share = p2pool.Share.from_block(block)
351 my_shares.add(share.hash)
352 print 'GOT SHARE! %x prev %x' % (share.hash % 2**32, 0 if share.previous_hash is None else share.previous_hash % 2**32), "DEAD ON ARRIVAL" if share.previous_hash != current_work.value['best_share_hash'] else "", time.time() - times[share.nonce], "s since getwork"
353 good = share.previous_hash == current_work.value['best_share_hash']
354 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
356 # eg. good = share.hash == current_work.value['best_share_hash'] here
360 print 'Error processing data received from worker:'
# NOTE(review): the lines below belong to a 'def get_rate():' helper whose
# header was lost in the paste; it reports the pool's attempts per second.
366 if current_work.value['best_share_hash'] is not None:
367 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
368 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
# expose compute/got_response/get_rate to miners over JSON-RPC
371 reactor.listenTCP(args.worker_port, server.Site(worker_interface.WorkerInterface(current_work, compute, got_response, get_rate)))
# Local transaction pool and an RPC-backed, cached raw-transaction fetcher.
378 tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
379 get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
# NOTE(review): the 'class Tx(object):' header (original line ~381) was lost
# in the paste; the methods below belong to it. Tx wraps a mempool
# transaction and lazily checks whether all of its inputs are confirmed.
382 def __init__(self, tx, seen_at_block):
383 self.hash = bitcoin.data.tx_type.hash256(tx)
385 self.seen_at_block = seen_at_block
# hashes this tx involves: itself plus every input's previous output
386 self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
388 #print '%x %r' % (seen_at_block, tx)
389 #for mention in self.mentions:
390 # print '%x' % mention
392 self.parents_all_in_blocks = False
395 self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
396 self._find_parents_in_blocks()
# Asynchronously resolve each input's source tx to accumulate value_in and
# learn whether every parent is already confirmed in a block.
398 @defer.inlineCallbacks
399 def _find_parents_in_blocks(self):
400 for tx_in in self.tx['tx_ins']:
402 raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
405 self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
406 #print raw_transaction
407 if not raw_transaction['parent_blocks']:
409 self.parents_all_in_blocks = True
# NOTE(review): the line below appears to belong to an 'is_good' check whose
# surrounding lines were lost -- confirm.
412 if not self.parents_all_in_blocks:
# NOTE(review): the lines below belong to a 'def new_tx(tx_hash):' handler
# whose header was lost in the paste; it fetches announced transactions from
# bitcoind and admits them to tx_pool.
418 @defer.inlineCallbacks
421 assert isinstance(tx_hash, (int, long))
422 #print "REQUESTING", tx_hash
423 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
425 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
428 print 'Error handling tx:'
431 # disable for now, for testing impact on stales
432 #factory.new_tx.watch(new_tx)
# A new bitcoin block invalidates current work; trigger a refresh.
434 def new_block(block_hash):
435 work_updated.happened()
436 factory.new_block.watch(new_block)
438 print 'Started successfully!'
# NOTE(review): two background polling coroutines follow; their 'def' headers
# and 'while True:' loops were lost in the paste. Each re-runs its updater,
# then waits for either its event flag or ~1 second (randomized).
441 @defer.inlineCallbacks
444 flag = work_updated.get_deferred()
446 yield set_real_work1()
449 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/1))], fireOnOneCallback=True)
# second poller: re-evaluates the share chain on tracker updates
451 @defer.inlineCallbacks
454 flag = tracker_updated.get_deferred()
456 yield set_real_work2()
459 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/1))], fireOnOneCallback=True)
# Periodic status report: pool hash rate, our weight in the chain, and stale
# share count. (The enclosing coroutine's header/loop were lost in the paste.)
464 counter = skiplist.CountsSkipList(tracker, my_script, run_identifier)
467 yield deferral.sleep(random.expovariate(1/1))
469 if current_work.value['best_share_hash'] is not None:
470 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
472 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
# our payout weight over the last (up to) 120 shares
473 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
474 count = counter(current_work.value['best_share_hash'], height, 2**100)
475 print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale)' % (
478 weights.get(my_script, 0)/total_weight*100,
479 math.format(weights.get(my_script, 0)/total_weight*att_s),
# shares we found that did not make it into the chain
481 len(my_shares) - count,
483 #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
484 #for k, v in weights.iteritems():
485 # print k.encode('hex'), v/total_weight
# NOTE(review): command-line entry point; the enclosing 'def run():' header
# was lost in the paste. Builds the argparse parser, normalizes defaults, and
# schedules main() on the reactor.
496 parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
497 parser.add_argument('--version', action='version', version=p2pool_init.__version__)
498 parser.add_argument('--testnet',
499 help='use the testnet',
500 action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
501 parser.add_argument('--debug',
502 help='debugging mode',
503 action='store_const', const=True, default=False, dest='debug')
504 parser.add_argument('-a', '--address',
505 help='generate to this address (defaults to requesting one from bitcoind)',
506 type=str, action='store', default=None, dest='address')
# options for the p2pool peer-to-peer side
508 p2pool_group = parser.add_argument_group('p2pool interface')
509 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
510 help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
511 type=int, action='store', default=None, dest='p2pool_port')
512 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
513 help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
514 type=str, action='append', default=[], dest='p2pool_nodes')
515 parser.add_argument('-l', '--low-bandwidth',
516 help='trade lower bandwidth usage for higher latency (reduced efficiency)',
517 action='store_true', default=False, dest='low_bandwidth')
# options for the miner-facing JSON-RPC side
519 worker_group = parser.add_argument_group('worker interface')
520 worker_group.add_argument('-w', '--worker-port', metavar='PORT',
521 help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
522 type=int, action='store', default=9332, dest='worker_port')
# options for talking to the local bitcoind
524 bitcoind_group = parser.add_argument_group('bitcoind interface')
525 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
526 help='connect to a bitcoind at this address (default: 127.0.0.1)',
527 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
528 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
529 help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
530 type=int, action='store', default=8332, dest='bitcoind_rpc_port')
531 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
532 help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
533 type=int, action='store', default=None, dest='bitcoind_p2p_port')
# positional credentials for the bitcoind RPC interface
535 bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
536 help='bitcoind RPC interface username',
537 type=str, action='store', dest='bitcoind_rpc_username')
538 bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
539 help='bitcoind RPC interface password',
540 type=str, action='store', dest='bitcoind_rpc_password')
542 args = parser.parse_args()
# NOTE(review): the 'if args.debug:' guard before this line was lost.
545 p2pool_init.DEBUG = True
class TimestampingPipe(object):
    """File-like wrapper that prefixes each complete output line with an
    HH:MM:SS timestamp.

    Partial lines (no trailing newline yet) are buffered until their
    terminating newline arrives, so interleaved writes still produce exactly
    one timestamp per output line.
    """
    def __init__(self, inner_file):
        self.inner_file = inner_file
        # pending partial line; restored -- as pasted, self.buf was read in
        # write() but never initialized, so the first write() would fail
        self.buf = ''
    def write(self, data):
        buf = self.buf + data
        lines = buf.split('\n')
        # everything before the last '\n' is complete: stamp and emit it
        for line in lines[:-1]:
            self.inner_file.write("%s %s\n" % (time.strftime("%H:%M:%S"), line))
        # keep the trailing partial line for the next write()
        self.buf = lines[-1]
# Route all program output through the timestamping wrapper.
556 sys.stdout = TimestampingPipe(sys.stdout)
557 sys.stderr = TimestampingPipe(sys.stderr)
# fill in network-dependent port defaults not given on the command line
559 if args.bitcoind_p2p_port is None:
560 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
562 if args.p2pool_port is None:
563 args.p2pool_port = args.net.P2P_PORT
# resolve -a/--address to a pubkey hash up front so bad input fails fast
565 if args.address is not None:
567 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
# NOTE(review): the try/except wrapping the conversion above was lost in the
# paste; 'e' below refers to the caught exception.
569 raise ValueError("error parsing address: " + repr(e))
571 args.pubkey_hash = None
# hand off to the reactor; main(args) runs once the event loop starts
573 reactor.callWhenRunning(main, args)