3 from __future__ import division
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    '''Fetch the current work template and chain height from bitcoind.

    Issues getwork and getblocknumber concurrently over JSON-RPC and
    fires the Deferred with a (BlockAttempt, height) tuple.
    '''
    # Start both RPCs before waiting on either; a block could still arrive
    # between the two server-side evaluations.
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
    height = yield height_df
    # Attach no-op errbacks so residual failures on the already-consumed
    # Deferreds are not reported as unhandled errors.
    work_df.addErrback(lambda fail: None)
    height_df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    '''Ask bitcoind over its P2P connection for a payout script.

    Sends a checkorder request with the null order. Fires with the script
    on 'success', with None on 'denied', and raises ValueError for any
    other reply.
    '''
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    '''Fallback payout-script source: bitcoind's own wallet.

    Requests (creating if necessary) the address of the 'p2pool' wallet
    account over RPC and converts it into a pay-to-pubkey-hash script.
    '''
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
@defer.inlineCallbacks
# NOTE(review): the 'def main(args):' header falls in a gap of this excerpt;
# the indented lines below are the opening of that coroutine's body.
    print 'p2pool (version %s)' % (p2pool_init.__version__,)
    # connect to bitcoind over JSON-RPC and do initial getwork
    url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
    print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
    bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
    # per-network sanity check that we are talking to the right kind of bitcoind
    good = yield args.net.BITCOIN_RPC_CHECK(bitcoind)
    # NOTE(review): an 'if not good:' style guard for the next line is missing
    # from this excerpt
        print " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
    temp_work, temp_height = yield getwork(bitcoind)
    print ' Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
    # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
    print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
    factory = bitcoin.p2p.ClientFactory(args.net)
    reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
    my_script = yield get_payout_script(factory)
    if args.pubkey_hash is None:
        # NOTE(review): an inner guard (checkorder denied, my_script is None)
        # falls in a gap of this excerpt
            print ' IP transaction denied ... falling back to sending to address.'
            my_script = yield get_payout_script2(bitcoind, args.net)
    # NOTE(review): the 'else:' line is missing — this is the path where an
    # explicit --address/pubkey hash was supplied
        my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
    print ' Payout script:', my_script.encode('hex')
    print 'Loading cached block headers...'
    ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
    print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
    tracker = p2pool.OkayTracker(args.net)
    ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
    known_verified = set()
    print "Loading shares..."
    # replay the on-disk share store into the in-memory tracker
    for i, (mode, contents) in enumerate(ss.get_shares()):
        # NOTE(review): the opening arm of this mode dispatch (the plain
        # 'share' case) falls in a gap of this excerpt
            if contents.hash in tracker.shares:
                # NOTE(review): 'continue' for the duplicate case is missing
            contents.shared = True
            contents.stored = True
            tracker.add(contents)
            # progress indicator every 1000 shares
            if len(tracker.shares) % 1000 == 0 and tracker.shares:
                print " %i" % (len(tracker.shares),)
        elif mode == 'verified_hash':
            known_verified.add(contents)
        # NOTE(review): 'else:' is missing — any unknown mode means a corrupt store
            raise AssertionError()
    print " ...inserting %i verified shares..." % (len(known_verified),)
    for h in known_verified:
        if h not in tracker.shares:
            # NOTE(review): 'continue' is missing from this excerpt
        tracker.verified.add(tracker.shares[h])
    print " ...done loading %i shares!" % (len(tracker.shares),)
    # persist every share / verified hash the tracker accepts from now on
    tracker.added.watch(ss.add_share)
    tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
    peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
    # information affecting work that should trigger a long-polling update
    current_work = variable.Variable(None)
    # information affecting work that should not trigger a long-polling update
    current_work2 = variable.Variable(None)
    # fired whenever the current work is known to be stale (e.g. a new block)
    work_updated = variable.Event()
    # share_hash -> (last request time, request count); throttles re-requests
    requested = expiring_dict.ExpiringDict(300)
    @defer.inlineCallbacks
    def set_real_work1():
        # Fetch fresh work from bitcoind and publish it to the work variables.
        work, height = yield getwork(bitcoind)
        # did the bitcoin chain tip move since the last poll?
        changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
        current_work.set(dict(
            version=work.version,
            previous_block=work.previous_block,
            # NOTE(review): further dict fields fall in a gap of this excerpt
            best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
        # NOTE(review): the close of current_work.set(...) is missing here
        current_work2.set(dict(
            # local clock minus the timestamp bitcoind stamped on the work
            clock_offset=time.time() - work.timestamp,
    def set_real_work2():
        # Re-evaluate the best share chain and request any missing parents.
        best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
        t = dict(current_work.value)
        t['best_share_hash'] = best
        # NOTE(review): current_work.set(t) and a later 't = time.time()'
        # rebind fall in gaps of this excerpt; below 't' is used as a timestamp
        for peer2, share_hash in desired:
            if share_hash not in tracker.tails: # was received in the time tracker.think was running
                # NOTE(review): 'continue' is missing from this excerpt
            last_request_time, count = requested.get(share_hash, (None, 0))
            # exponential backoff on repeated requests for the same share
            if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                # NOTE(review): 'continue' is missing from this excerpt
            # collect peers known to hold heads descending from this tail
            potential_peers = set()
            for head in tracker.tails[share_hash]:
                potential_peers.update(peer_heads.get(head, set()))
            potential_peers = [peer for peer in potential_peers if peer.connected2]
            if count == 0 and peer2 is not None and peer2.connected2:
                # NOTE(review): the first-request peer assignment is missing here
            # otherwise usually (80%) pick a random knowledgeable peer
            peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
            print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
            # NOTE(review): the peer.send_getshares(...) call opener is missing
            stops=list(set(tracker.heads) | set(
                tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
            requested[share_hash] = t, count + 1
    print 'Initializing work...'
    yield set_real_work1()
    # NOTE(review): a set_real_work2() call presumably sits in the gap here —
    # confirm against the full source
    start_time = time.time() - current_work2.value['clock_offset']
    # setup p2p logic and join p2pool network
    def share_share(share, ignore_peer=None):
        # Forward a share to every connected peer except the one it came from.
        for peer in p2p_node.peers.itervalues():
            if peer is ignore_peer:
                # NOTE(review): 'continue' is missing from this excerpt
            #if p2pool_init.DEBUG:
            #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
            peer.send_shares([share])
    def p2p_shares(shares, peer=None):
        # Handle a batch of shares received from the p2p network.
        # NOTE(review): a batch-size guard line is missing from this excerpt
            print 'Processing %i shares...' % (len(shares),)
        # NOTE(review): the 'for share in shares:' loop opener is missing here
            if share.hash in tracker.shares:
                #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
            #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
            #for peer2, share_hash in desired:
            #    print 'Requesting parent share %x' % (share_hash,)
            #    peer2.send_getshares(hashes=[share_hash], parents=2000)
            # a share that also satisfies the bitcoin target is a full block
            if share.bitcoin_hash <= share.header['target']:
                print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                # NOTE(review): 'else:' is missing from this excerpt
                    print 'No bitcoind connection! Erp!'
        # remember which peer knows about the head of this batch
        if shares and peer is not None:
            peer_heads.setdefault(shares[0].hash, set()).add(peer)
        print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
    def p2p_share_hashes(share_hashes, peer):
        # A peer announced share hashes; request any we don't already have.
        # NOTE(review): initialization of 't' (a timestamp) and 'get_hashes'
        # falls in a gap of this excerpt
        for share_hash in share_hashes:
            if share_hash in tracker.shares:
                # NOTE(review): 'continue' is missing from this excerpt
            last_request_time, count = requested.get(share_hash, (None, 0))
            # exponential backoff on repeated requests for the same hash
            if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                # NOTE(review): 'continue' is missing from this excerpt
            print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
            get_hashes.append(share_hash)
            requested[share_hash] = t, count + 1
        if share_hashes and peer is not None:
            peer_heads.setdefault(share_hashes[0], set()).add(peer)
        # NOTE(review): an 'if get_hashes:' style guard is missing here
            peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
    def p2p_get_shares(share_hashes, parents, stops, peer):
        # A peer asked us for shares: walk each requested chain at most
        # 'parents' ancestors deep, stopping at any hash in 'stops'.
        parents = min(parents, 1000//len(share_hashes)) # cap total response size
        # NOTE(review): initialization of the 'stops' set and the 'shares'
        # response list falls in a gap of this excerpt
        for share_hash in share_hashes:
            for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                if share.hash in stops:
                    # NOTE(review): the 'break' and the append of 'share' to the
                    # response are missing from this excerpt
        print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
        peer.send_shares(shares, full=True)
    print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
    # NOTE(review): a 'def parse(x):' helper header and its 'try:' are missing;
    # it turns 'ADDR[:PORT]' strings into (host, port) tuples, defaulting the
    # port to the network's standard P2P port.
        ip, port = x.split(':')
        # NOTE(review): the successful-parse return falls in a gap here
        return x, args.net.P2P_PORT
    # built-in bootstrap peer addresses
    # NOTE(review): the set-literal opener for these addresses is missing here
        ('72.14.191.28', args.net.P2P_PORT),
        ('62.204.197.159', args.net.P2P_PORT),
        ('142.58.248.28', args.net.P2P_PORT),
        ('94.23.34.145', args.net.P2P_PORT),
    # DNS-resolved bootstrap hosts
        'dabuttonfactory.com',
            nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
        # NOTE(review): the 'except:' line is missing here — resolution
        # failures are logged and ignored
            log.err(None, 'Error resolving bootstrap node IP:')
    # NOTE(review): the 'p2p.Node(' constructor opener is missing here
        current_work=current_work,
        port=args.p2pool_port,
        # persistent peer-address store in addrs.dat next to the program
        addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
        mode=0 if args.low_bandwidth else 1,
        preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
    # wire up the p2p callbacks defined above
    p2p_node.handle_shares = p2p_shares
    p2p_node.handle_share_hashes = p2p_share_hashes
    p2p_node.handle_get_shares = p2p_get_shares
    # send share when the chain changes to their chain
    def work_changed(new_work):
        # Broadcast our not-yet-shared shares whenever the best chain changes.
        #print 'Work changed:', new_work
        for share in tracker.get_chain_known(new_work['best_share_hash']):
            # NOTE(review): the stop condition for already-shared shares falls
            # in a gap of this excerpt
            share_share(share, share.peer)
    current_work.changed.watch(work_changed)
    @defer.inlineCallbacks
    # NOTE(review): this UPnP loop's 'def'/'while True:'/'try:' lines are
    # missing from this excerpt
            is_lan, lan_ip = yield ipdiscover.get_local_ip()
            # NOTE(review): an 'if is_lan:' style guard is missing here
                pm = yield portmapper.get_port_mapper()
                yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
            # NOTE(review): the 'except:' line is missing here
            if p2pool_init.DEBUG:
                log.err(None, "UPnP error:")
            # retry/refresh the mapping after a randomized ~2 minute delay
            yield deferral.sleep(random.expovariate(1/120))
    # start listening for workers with a JSON-RPC server
    print 'Listening for workers on port %i...' % (args.worker_port,)
    # merkle_root -> transaction list for work we handed out (expiring dict)
    merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
    # random per-run tag embedded in our generation txs so we can recognize
    # this node's own shares later
    run_identifier = struct.pack('<Q', random.randrange(2**64))
    share_counter = skiplists.CountsSkipList(tracker, run_identifier)
351 def get_share_counts():
352 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
353 matching_in_chain = share_counter(current_work.value['best_share_hash'], height)
354 shares_in_chain = my_shares & matching_in_chain
355 stale_shares = my_shares - matching_in_chain
356 return len(shares_in_chain) + len(stale_shares), len(stale_shares)
    def compute(state, payout_script):
        # Build a block attempt (getwork response) for a mining worker.
        if payout_script is None:
            payout_script = my_script # fall back to this node's own payout script
        # refuse to hand out work until we have a share chain / peers
        if state['best_share_hash'] is None and args.net.PERSIST:
            raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
        if len(p2p_node.peers) == 0 and args.net.PERSIST:
            raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
        # pick mempool transactions to include alongside the generation tx
        pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
        pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
        # NOTE(review): initialization of 'extra_txs' and 'size' falls in a gap
        for tx in pre_extra_txs:
            this_size = len(bitcoin.data.tx_type.pack(tx.tx))
            if size + this_size > 500000: # keep the block well under size limits
                # NOTE(review): the 'break' and the append/size accounting are
                # missing from this excerpt
        # XXX assuming generate_tx is smallish here..
        def get_stale_frac():
            # Encode this node's recent stale fraction into the share nonce.
            shares, stale_shares = get_share_counts()
            # NOTE(review): a zero-shares guard is missing from this excerpt
            frac = stale_shares/shares
            # 16-bit fixed point, duplicated (two identical halves)
            return 2*struct.pack('<H', int(65535*frac + .5))
        generate_tx = p2pool.generate_transaction(
            # NOTE(review): some arguments of this call fall in gaps here
            previous_share_hash=state['best_share_hash'],
            new_script=payout_script,
            subsidy=args.net.BITCOIN_SUBSIDY_FUNC(state['height']) + sum(tx.value_in - tx.value_out for tx in extra_txs),
            nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)) + get_stale_frac(),
            block_target=state['target'],
        print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
        #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
        #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
        transactions = [generate_tx] + [tx.tx for tx in extra_txs]
        merkle_root = bitcoin.data.merkle_hash(transactions)
        merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
        timestamp = int(time.time() - current_work2.value['clock_offset'])
        if state['best_share_hash'] is not None:
            # don't let the timestamp fall below median-of-last-11 shares + 1
            timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
            if timestamp2 > timestamp:
                print 'Toff', timestamp2 - timestamp
                timestamp = timestamp2
        # share target, as embedded in the generation tx's coinbase script
        target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
        # record hand-out time so share age can be printed on submission
        times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
        #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
        return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
    def got_response(data):
        # Handle a getwork submission from a mining worker.
        # NOTE(review): the enclosing 'try:' line is missing from this excerpt
            # match up with transactions
            header = bitcoin.getwork.decode_data(data)
            transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
            if transactions is None:
                print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                # NOTE(review): an early return falls in a gap here
            block = dict(header=header, txs=transactions)
            hash_ = bitcoin.data.block_header_type.hash256(block['header'])
            # anything that meets the bitcoin target gets submitted to bitcoind
            if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=block)
                # NOTE(review): 'else:' is missing from this excerpt
                    print 'No bitcoind connection! Erp!'
                if hash_ <= block['header']['target']:
                    print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
            # share target is embedded in the generation tx's coinbase script
            target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
            # NOTE(review): guarded by a missing hash-vs-target comparison line
                print 'Received invalid share from worker - %x/%x' % (hash_, target)
            share = p2pool.Share.from_block(block)
            my_shares.add(share.hash)
            print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
            good = share.previous_hash == current_work.value['best_share_hash']
            # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
            # eg. good = share.hash == current_work.value['best_share_hash'] here
        # NOTE(review): the 'except:' line is missing here
            log.err(None, 'Error processing data received from worker:')
    web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
    # NOTE(review): the 'def get_rate():' header is missing from this excerpt
        if current_work.value['best_share_hash'] is not None:
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            # pool hashrate estimated over up to the last 720 shares
            att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
            return json.dumps(att_s)
        return json.dumps(None)
    # NOTE(review): the 'def get_users():' header is missing from this excerpt
        height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
        # NOTE(review): initialization of 'res' falls in a gap here
        for script in sorted(weights, key=lambda s: weights[s]):
            # each payout script's fraction of the pool, keyed human-readably
            res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
        return json.dumps(res)
    class WebInterface(resource.Resource):
        # Minimal twisted.web resource serving func() with a fixed MIME type.
        def __init__(self, func, mime_type):
            self.func, self.mime_type = func, mime_type
        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
            # NOTE(review): the 'return self.func()' line is missing here
    web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
    web_root.putChild('users', WebInterface(get_users, 'application/json'))
    # NOTE(review): 'draw' is not imported in this excerpt — presumably the
    # optional chart-rendering module (--charts); confirm against full source
    web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
    reactor.listenTCP(args.worker_port, server.Site(web_root))
    # candidate mempool transactions to merge into our blocks
    tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
    get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
    # NOTE(review): the 'class Tx(...):' header is missing from this excerpt
        def __init__(self, tx, seen_at_block):
            self.hash = bitcoin.data.tx_type.hash256(tx)
            # NOTE(review): the 'self.tx = tx' assignment falls in a gap here
            self.seen_at_block = seen_at_block
            # every hash this tx mentions: itself plus its inputs' sources
            self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
            #print '%x %r' % (seen_at_block, tx)
            #for mention in self.mentions:
            #    print '%x' % mention
            self.parents_all_in_blocks = False
            # NOTE(review): initialization of self.value_in falls in a gap here
            self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
            self._find_parents_in_blocks()
        @defer.inlineCallbacks
        def _find_parents_in_blocks(self):
            # Sum input values and learn whether every parent is in a block.
            for tx_in in self.tx['tx_ins']:
                # NOTE(review): a 'try:' wrapper line is missing here
                raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                #print raw_transaction
                if not raw_transaction['parent_blocks']:
                    # NOTE(review): the early 'return' is missing here
            self.parents_all_in_blocks = True
        # NOTE(review): these lines appear to belong to a validity-check
        # method whose header and remaining body fall in gaps of this excerpt
            if not self.parents_all_in_blocks:
                # NOTE(review): 'return False' is missing here
    @defer.inlineCallbacks
    # NOTE(review): the new-tx handler's 'def'/'try:' lines are missing here
        assert isinstance(tx_hash, (int, long))
        #print 'REQUESTING', tx_hash
        tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
        # cache the fetched tx, tagged with the block it was first seen at
        tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
        # NOTE(review): the 'except:' line is missing here
        log.err(None, 'Error handling tx:')
    # disable for now, for testing impact on stales
    #factory.new_tx.watch(new_tx)
    def new_block(block_hash):
        # bitcoind announced a new block: flag the current work as stale so
        # the polling loop refreshes it promptly.
        work_updated.happened()
    factory.new_block.watch(new_block)
    print 'Started successfully!'
    # re-run chain evaluation whenever the header tracker learns new headers
    ht.updated.watch(set_real_work2)
    @defer.inlineCallbacks
    # NOTE(review): this work-polling loop's 'def'/'while True:' lines are
    # missing from this excerpt
            flag = work_updated.get_deferred()
            yield set_real_work1()
            # wake on a new-block event or after a randomized ~20s delay,
            # whichever fires first
            yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
    @defer.inlineCallbacks
    # NOTE(review): a second periodic loop; most of its body falls in a gap
            yield deferral.sleep(random.expovariate(1/20))
    def watchdog_handler(signum, frame):
        # SIGALRM handler: the 30s alarm below fired, i.e. the reactor stalled.
        print "Watchdog timer went off at:"
        traceback.print_exc()
    # re-arm a 30-second alarm every second; if the event loop ever stalls
    # for 30s, SIGALRM fires and the handler above prints a traceback
    signal.signal(signal.SIGALRM, watchdog_handler)
    task.LoopingCall(signal.alarm, 30).start(1)
    def read_stale_frac(share):
        # Decode the stale fraction embedded in a share's nonce (see
        # get_stale_frac); non-conforming nonces yield no value.
        if len(share.nonce) != 20:
            # NOTE(review): 'return None' is missing from this excerpt
        a, b = struct.unpack("<HH", share.nonce[-4:])
        # NOTE(review): the remainder (consistency check of the duplicated
        # halves and the fractional result) falls in a gap of this excerpt
    # periodic status printout
    # NOTE(review): the surrounding loop and guards are missing from this excerpt
        yield deferral.sleep(3)
        if current_work.value['best_share_hash'] is not None:
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            # NOTE(review): a minimum-height guard falls in a gap here
            att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
            shares, stale_shares = get_share_counts()
            print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                # NOTE(review): several elements of this tuple fall in gaps
                len(tracker.verified.shares),
                weights.get(my_script, 0)/total_weight*100,
                math.format(weights.get(my_script, 0)/total_weight*att_s),
            ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
            # stale fractions reported by the last (up to) 120 chain shares
            fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
            # NOTE(review): an emptiness guard for 'fracs' is missing here
                med = math.median(fracs)
                print 'Median stale proportion:', med
                # NOTE(review): a nonzero-shares guard is missing here
                    print ' Own:', stale_shares/shares
                    # NOTE(review): a guard against med == 1 is missing here
                        print ' Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
    # NOTE(review): the 'except:' of main's outer try falls in a gap here
        log.err(None, 'Fatal error:')
# NOTE(review): the enclosing 'def run():' header falls in a gap of this
# excerpt; these lines build the command-line interface.
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
    # allow @argfile inputs with multiple whitespace-separated args per line
    parser.convert_arg_line_to_args = lambda arg_line: (arg for arg in arg_line.split() if arg.strip())
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--net',
        help='use specified network (choices: bitcoin (default), namecoin, ixcoin)',
        action='store', choices=set(['bitcoin', 'namecoin', 'ixcoin']), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    # positional credentials for the bitcoind RPC interface
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    args = parser.parse_args()
    # NOTE(review): an 'if args.debug:' guard line is missing from this excerpt
        p2pool_init.DEBUG = True
    class ReopeningFile(object):
        # File-like wrapper that can close and reopen its underlying file
        # (used so SIGUSR1 can rotate debug.log below).
        def __init__(self, *open_args, **open_kwargs):
            self.open_args, self.open_kwargs = open_args, open_kwargs
            self.inner_file = open(*self.open_args, **self.open_kwargs)
        # NOTE(review): the reopen method's 'def' header is missing here
            self.inner_file.close()
            self.inner_file = open(*self.open_args, **self.open_kwargs)
        def write(self, data):
            self.inner_file.write(data)
        # NOTE(review): the flush method's 'def' header is missing here
            self.inner_file.flush()
    class TeePipe(object):
        # File-like object that fans writes out to several underlying files.
        def __init__(self, outputs):
            self.outputs = outputs
        def write(self, data):
            for output in self.outputs:
                # NOTE(review): the per-output write call is missing here
        # NOTE(review): the flush method's 'def' header is missing here
            for output in self.outputs:
                # NOTE(review): the per-output flush call is missing here
    class TimestampingPipe(object):
        # File-like wrapper that prefixes each complete line with a timestamp.
        def __init__(self, inner_file):
            self.inner_file = inner_file
            # NOTE(review): initialization of self.buf falls in a gap here
        def write(self, data):
            buf = self.buf + data
            lines = buf.split('\n')
            for line in lines[:-1]:
                self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                self.inner_file.flush()
            # NOTE(review): carrying the trailing partial line in self.buf
            # falls in a gap of this excerpt
    logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
    # mirror all output (including twisted log errors) to both the original
    # stderr and debug.log, timestamping every line
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            # SIGUSR1 rotates debug.log (logrotate-style management)
            print '''Caught SIGUSR1, closing 'debug.log'...'''
            # NOTE(review): the logfile reopen call falls in a gap here
            print '''...and reopened 'debug.log' after catching SIGUSR1.'''
        signal.signal(signal.SIGUSR1, sigusr1)
    # map the (--net, --testnet) pair to the network-parameters object
    # NOTE(review): the dict-literal opener of this assignment is missing here
        ('bitcoin', False): p2pool.Mainnet,
        ('bitcoin', True): p2pool.Testnet,
        ('namecoin', False): p2pool.NamecoinMainnet,
        ('namecoin', True): p2pool.NamecoinTestnet,
        ('ixcoin', False): p2pool.IxcoinMainnet,
        ('ixcoin', True): p2pool.IxcoinTestnet,
    }[args.net_name, args.testnet]
    # fill in network-dependent port defaults
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    if args.address is not None:
        # NOTE(review): a 'try:' line is missing from this excerpt
        args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        # NOTE(review): the 'except ...:' line binding 'e' is missing here
        raise ValueError('error parsing address: ' + repr(e))
    # NOTE(review): 'else:' is missing — no --address means main() will
    # request a payout script from bitcoind instead
        args.pubkey_hash = None
    # start the node once the reactor is running
    reactor.callWhenRunning(main, args)