3 from __future__ import division
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind, ht):
    """Poll bitcoind for a work unit and look up its parent block's height.

    Fires with a (BlockAttempt, height) tuple; retried automatically on
    RPC failure by the @deferral.retry decorator.
    """
    # a block could arrive in between these two queries
    raw_work = yield bitcoind.rpc_getwork()
    attempt = bitcoin.getwork.BlockAttempt.from_getwork(raw_work)
    defer.returnValue((attempt, ht.getHeight(attempt.previous_block)))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over bitcoin-p2p (checkorder) for a payout script.

    Fires with the script bytes on 'success', with None if the IP
    transaction was 'denied', and raises ValueError on any other reply.
    """
    res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
    if res['reply'] == 'success':
        defer.returnValue(res['script'])
    elif res['reply'] == 'denied':
        defer.returnValue(None)
    # defer.returnValue raises internally, so control only reaches here on
    # a reply that is neither 'success' nor 'denied'
    raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: pay to bitcoind's 'p2pool' account address.

    Fires with the script2 form of the address's pubkey hash.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
53 @defer.inlineCallbacks
59 print 'p2pool (version %s)' % (p2pool_init.__version__,)
62 # connect to bitcoind over JSON-RPC and do initial getwork
63 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
64 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
65 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
66 good = yield args.net.BITCOIN_RPC_CHECK(bitcoind)
68 print " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
70 temp_work = bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork()))
72 print ' Current block hash: %x' % (temp_work.previous_block,)
75 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
76 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
77 factory = bitcoin.p2p.ClientFactory(args.net)
78 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
79 my_script = yield get_payout_script(factory)
80 if args.pubkey_hash is None:
82 print ' IP transaction denied ... falling back to sending to address.'
83 my_script = yield get_payout_script2(bitcoind, args.net)
85 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
87 print ' Payout script:', my_script.encode('hex')
90 print 'Loading cached block headers...'
91 ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
92 print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
95 tracker = p2pool.OkayTracker(args.net)
96 ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
97 known_verified = set()
98 print "Loading shares..."
99 for i, (mode, contents) in enumerate(ss.get_shares()):
101 if contents.hash in tracker.shares:
103 contents.shared = True
104 contents.stored = True
105 tracker.add(contents)
106 if len(tracker.shares) % 1000 == 0 and tracker.shares:
107 print " %i" % (len(tracker.shares),)
108 elif mode == 'verified_hash':
109 known_verified.add(contents)
111 raise AssertionError()
112 print " ...inserting %i verified shares..." % (len(known_verified),)
113 for h in known_verified:
114 if h not in tracker.shares:
116 tracker.verified.add(tracker.shares[h])
117 print " ...done loading %i shares!" % (len(tracker.shares),)
119 tracker.added.watch(ss.add_share)
120 tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
122 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
124 # information affecting work that should trigger a long-polling update
125 current_work = variable.Variable(None)
126 # information affecting work that should not trigger a long-polling update
127 current_work2 = variable.Variable(None)
129 work_updated = variable.Event()
131 requested = expiring_dict.ExpiringDict(300)
133 @defer.inlineCallbacks
134 def set_real_work1():
135 work, height = yield getwork(bitcoind, ht)
136 changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
137 current_work.set(dict(
138 version=work.version,
139 previous_block=work.previous_block,
142 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
144 current_work2.set(dict(
145 clock_offset=time.time() - work.timestamp,
150 def set_real_work2():
151 best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
153 t = dict(current_work.value)
154 t['best_share_hash'] = best
158 for peer2, share_hash in desired:
159 if share_hash not in tracker.tails: # was received in the time tracker.think was running
161 last_request_time, count = requested.get(share_hash, (None, 0))
162 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
164 potential_peers = set()
165 for head in tracker.tails[share_hash]:
166 potential_peers.update(peer_heads.get(head, set()))
167 potential_peers = [peer for peer in potential_peers if peer.connected2]
168 if count == 0 and peer2 is not None and peer2.connected2:
171 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
175 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
179 stops=list(set(tracker.heads) | set(
180 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
183 requested[share_hash] = t, count + 1
185 print 'Initializing work...'
186 yield set_real_work1()
191 start_time = time.time() - current_work2.value['clock_offset']
193 # setup p2p logic and join p2pool network
195 def share_share(share, ignore_peer=None):
196 for peer in p2p_node.peers.itervalues():
197 if peer is ignore_peer:
199 #if p2pool_init.DEBUG:
200 # print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
201 peer.send_shares([share])
204 def p2p_shares(shares, peer=None):
206 print 'Processing %i shares...' % (len(shares),)
210 if share.hash in tracker.shares:
211 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
215 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
218 #for peer2, share_hash in desired:
219 # print 'Requesting parent share %x' % (share_hash,)
220 # peer2.send_getshares(hashes=[share_hash], parents=2000)
222 if share.bitcoin_hash <= share.header['target']:
224 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
226 if factory.conn.value is not None:
227 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
229 print 'No bitcoind connection! Erp!'
231 if shares and peer is not None:
232 peer_heads.setdefault(shares[0].hash, set()).add(peer)
238 print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
240 def p2p_share_hashes(share_hashes, peer):
243 for share_hash in share_hashes:
244 if share_hash in tracker.shares:
246 last_request_time, count = requested.get(share_hash, (None, 0))
247 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
249 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
250 get_hashes.append(share_hash)
251 requested[share_hash] = t, count + 1
253 if share_hashes and peer is not None:
254 peer_heads.setdefault(share_hashes[0], set()).add(peer)
256 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
258 def p2p_get_shares(share_hashes, parents, stops, peer):
259 parents = min(parents, 1000//len(share_hashes))
262 for share_hash in share_hashes:
263 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
264 if share.hash in stops:
267 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
268 peer.send_shares(shares, full=True)
270 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
274 ip, port = x.split(':')
277 return x, args.net.P2P_PORT
280 ('72.14.191.28', args.net.P2P_PORT),
281 ('62.204.197.159', args.net.P2P_PORT),
282 ('142.58.248.28', args.net.P2P_PORT),
283 ('94.23.34.145', args.net.P2P_PORT),
287 'dabuttonfactory.com',
290 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
292 log.err(None, 'Error resolving bootstrap node IP:')
295 current_work=current_work,
296 port=args.p2pool_port,
298 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
299 mode=0 if args.low_bandwidth else 1,
300 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
302 p2p_node.handle_shares = p2p_shares
303 p2p_node.handle_share_hashes = p2p_share_hashes
304 p2p_node.handle_get_shares = p2p_get_shares
308 # send share when the chain changes to their chain
309 def work_changed(new_work):
310 #print 'Work changed:', new_work
311 for share in tracker.get_chain_known(new_work['best_share_hash']):
314 share_share(share, share.peer)
315 current_work.changed.watch(work_changed)
320 @defer.inlineCallbacks
324 is_lan, lan_ip = yield ipdiscover.get_local_ip()
326 pm = yield portmapper.get_port_mapper()
327 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
329 if p2pool_init.DEBUG:
330 log.err(None, "UPnP error:")
331 yield deferral.sleep(random.expovariate(1/120))
336 # start listening for workers with a JSON-RPC server
338 print 'Listening for workers on port %i...' % (args.worker_port,)
342 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
343 run_identifier = struct.pack('<Q', random.randrange(2**64))
345 share_counter = skiplists.CountsSkipList(tracker, run_identifier)
def get_share_counts():
    """Count this node's shares: returns (total_shares, stale_shares).

    A share of ours counts as stale when it is not part of the chain
    ending at the current best share hash.
    """
    best = current_work.value['best_share_hash']
    height, _last = tracker.get_height_and_last(best)
    matching_in_chain = share_counter(best, height)
    good = my_shares & matching_in_chain
    stale = my_shares - matching_in_chain
    # good and stale partition my_shares, so the sum is the total
    return len(good) + len(stale), len(stale)
353 def compute(state, payout_script):
354 if payout_script is None:
355 payout_script = my_script
356 if state['best_share_hash'] is None and args.net.PERSIST:
357 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
358 if len(p2p_node.peers) == 0 and args.net.PERSIST:
359 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
360 pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
361 pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
364 for tx in pre_extra_txs:
365 this_size = len(bitcoin.data.tx_type.pack(tx.tx))
366 if size + this_size > 500000:
371 # XXX assuming generate_tx is smallish here..
372 def get_stale_frac():
373 shares, stale_shares = get_share_counts()
376 frac = stale_shares/shares
377 return 2*struct.pack('<H', int(65535*frac + .5))
378 generate_tx = p2pool.generate_transaction(
380 previous_share_hash=state['best_share_hash'],
381 new_script=payout_script,
382 subsidy=args.net.BITCOIN_SUBSIDY_FUNC(state['height']) + sum(tx.value_in - tx.value_out for tx in extra_txs),
383 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)) + get_stale_frac(),
384 block_target=state['target'],
387 print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
388 #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
389 #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
390 transactions = [generate_tx] + [tx.tx for tx in extra_txs]
391 merkle_root = bitcoin.data.merkle_hash(transactions)
392 merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
394 timestamp = int(time.time() - current_work2.value['clock_offset'])
395 if state['best_share_hash'] is not None:
396 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
397 if timestamp2 > timestamp:
398 print 'Toff', timestamp2 - timestamp
399 timestamp = timestamp2
400 target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
401 times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
402 #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
403 return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
408 def got_response(data):
410 # match up with transactions
411 header = bitcoin.getwork.decode_data(data)
412 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
413 if transactions is None:
414 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
416 block = dict(header=header, txs=transactions)
417 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
418 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
419 if factory.conn.value is not None:
420 factory.conn.value.send_block(block=block)
422 print 'No bitcoind connection! Erp!'
423 if hash_ <= block['header']['target']:
425 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
427 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
429 print 'Received invalid share from worker - %x/%x' % (hash_, target)
431 share = p2pool.Share.from_block(block)
432 my_shares.add(share.hash)
433 print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
434 good = share.previous_hash == current_work.value['best_share_hash']
435 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
437 # eg. good = share.hash == current_work.value['best_share_hash'] here
440 log.err(None, 'Error processing data received from worker:')
443 web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
446 if current_work.value['best_share_hash'] is not None:
447 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
448 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
449 return json.dumps(att_s)
450 return json.dumps(None)
453 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
454 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
456 for script in sorted(weights, key=lambda s: weights[s]):
457 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
458 return json.dumps(res)
460 class WebInterface(resource.Resource):
461 def __init__(self, func, mime_type):
462 self.func, self.mime_type = func, mime_type
464 def render_GET(self, request):
465 request.setHeader('Content-Type', self.mime_type)
468 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
469 web_root.putChild('users', WebInterface(get_users, 'application/json'))
471 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
473 reactor.listenTCP(args.worker_port, server.Site(web_root))
480 tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
481 get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
484 def __init__(self, tx, seen_at_block):
485 self.hash = bitcoin.data.tx_type.hash256(tx)
487 self.seen_at_block = seen_at_block
488 self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
490 #print '%x %r' % (seen_at_block, tx)
491 #for mention in self.mentions:
492 # print '%x' % mention
494 self.parents_all_in_blocks = False
497 self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
498 self._find_parents_in_blocks()
500 @defer.inlineCallbacks
501 def _find_parents_in_blocks(self):
502 for tx_in in self.tx['tx_ins']:
504 raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
507 self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
508 #print raw_transaction
509 if not raw_transaction['parent_blocks']:
511 self.parents_all_in_blocks = True
514 if not self.parents_all_in_blocks:
520 @defer.inlineCallbacks
523 assert isinstance(tx_hash, (int, long))
524 #print 'REQUESTING', tx_hash
525 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
527 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
529 log.err(None, 'Error handling tx:')
530 # disable for now, for testing impact on stales
531 #factory.new_tx.watch(new_tx)
def new_block(block_hash):
    # A new bitcoin block arrived over p2p: trigger a getwork refresh so
    # we stop mining on the now-stale previous block.
    work_updated.happened()
535 factory.new_block.watch(new_block)
537 print 'Started successfully!'
540 ht.updated.watch(set_real_work2)
542 @defer.inlineCallbacks
545 flag = work_updated.get_deferred()
547 yield set_real_work1()
550 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
552 @defer.inlineCallbacks
559 yield deferral.sleep(random.expovariate(1/20))
565 def watchdog_handler(signum, frame):
566 print "Watchdog timer went off at:"
567 traceback.print_exc()
569 signal.signal(signal.SIGALRM, watchdog_handler)
570 task.LoopingCall(signal.alarm, 30).start(1)
573 def read_stale_frac(share):
574 if len(share.nonce) != 20:
576 a, b = struct.unpack("<HH", share.nonce[-4:])
582 yield deferral.sleep(3)
584 if current_work.value['best_share_hash'] is not None:
585 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
587 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
588 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
589 shares, stale_shares = get_share_counts()
590 print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
593 len(tracker.verified.shares),
595 weights.get(my_script, 0)/total_weight*100,
596 math.format(weights.get(my_script, 0)/total_weight*att_s),
600 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
601 fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
603 med = math.median(fracs)
604 print 'Median stale proportion:', med
606 print ' Own:', stale_shares/shares
608 print ' Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
614 log.err(None, 'Fatal error:')
619 parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
620 parser.convert_arg_line_to_args = lambda arg_line: (arg for arg in arg_line.split() if arg.strip())
621 parser.add_argument('--version', action='version', version=p2pool_init.__version__)
622 parser.add_argument('--net',
623 help='use specified network (choices: bitcoin (default), namecoin, ixcoin)',
624 action='store', choices=set(['bitcoin', 'namecoin', 'ixcoin']), default='bitcoin', dest='net_name')
625 parser.add_argument('--testnet',
626 help='use the testnet',
627 action='store_const', const=True, default=False, dest='testnet')
628 parser.add_argument('--debug',
629 help='debugging mode',
630 action='store_const', const=True, default=False, dest='debug')
631 parser.add_argument('-a', '--address',
632 help='generate to this address (defaults to requesting one from bitcoind)',
633 type=str, action='store', default=None, dest='address')
634 parser.add_argument('--charts',
635 help='generate charts on the web interface (requires PIL and pygame)',
636 action='store_const', const=True, default=False, dest='charts')
638 p2pool_group = parser.add_argument_group('p2pool interface')
639 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
640 help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
641 type=int, action='store', default=None, dest='p2pool_port')
642 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
643 help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
644 type=str, action='append', default=[], dest='p2pool_nodes')
645 parser.add_argument('-l', '--low-bandwidth',
646 help='trade lower bandwidth usage for higher latency (reduced efficiency)',
647 action='store_true', default=False, dest='low_bandwidth')
648 parser.add_argument('--disable-upnp',
649 help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
650 action='store_false', default=True, dest='upnp')
652 worker_group = parser.add_argument_group('worker interface')
653 worker_group.add_argument('-w', '--worker-port', metavar='PORT',
654 help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
655 type=int, action='store', default=9332, dest='worker_port')
657 bitcoind_group = parser.add_argument_group('bitcoind interface')
658 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
659 help='connect to a bitcoind at this address (default: 127.0.0.1)',
660 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
661 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
662 help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
663 type=int, action='store', default=8332, dest='bitcoind_rpc_port')
664 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
665 help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
666 type=int, action='store', default=None, dest='bitcoind_p2p_port')
668 bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
669 help='bitcoind RPC interface username',
670 type=str, action='store', dest='bitcoind_rpc_username')
671 bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
672 help='bitcoind RPC interface password',
673 type=str, action='store', dest='bitcoind_rpc_password')
675 args = parser.parse_args()
678 p2pool_init.DEBUG = True
679 class ReopeningFile(object):
680 def __init__(self, *open_args, **open_kwargs):
681 self.open_args, self.open_kwargs = open_args, open_kwargs
682 self.inner_file = open(*self.open_args, **self.open_kwargs)
684 self.inner_file.close()
685 self.inner_file = open(*self.open_args, **self.open_kwargs)
686 def write(self, data):
687 self.inner_file.write(data)
689 self.inner_file.flush()
690 class TeePipe(object):
691 def __init__(self, outputs):
692 self.outputs = outputs
693 def write(self, data):
694 for output in self.outputs:
697 for output in self.outputs:
699 class TimestampingPipe(object):
700 def __init__(self, inner_file):
701 self.inner_file = inner_file
704 def write(self, data):
705 buf = self.buf + data
706 lines = buf.split('\n')
707 for line in lines[:-1]:
708 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
709 self.inner_file.flush()
713 logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
714 sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
715 if hasattr(signal, "SIGUSR1"):
716 def sigusr1(signum, frame):
717 print '''Caught SIGUSR1, closing 'debug.log'...'''
719 print '''...and reopened 'debug.log' after catching SIGUSR1.'''
720 signal.signal(signal.SIGUSR1, sigusr1)
723 ('bitcoin', False): p2pool.Mainnet,
724 ('bitcoin', True): p2pool.Testnet,
725 ('namecoin', False): p2pool.NamecoinMainnet,
726 ('namecoin', True): p2pool.NamecoinTestnet,
727 ('ixcoin', False): p2pool.IxcoinMainnet,
728 ('ixcoin', True): p2pool.IxcoinTestnet,
729 }[args.net_name, args.testnet]
731 if args.bitcoind_p2p_port is None:
732 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
734 if args.p2pool_port is None:
735 args.p2pool_port = args.net.P2P_PORT
737 if args.address is not None:
739 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
741 raise ValueError('error parsing address: ' + repr(e))
743 args.pubkey_hash = None
745 reactor.callWhenRunning(main, args)