3 from __future__ import division
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch current work and chain height from bitcoind over JSON-RPC.

    Fires both RPCs concurrently and returns (via Deferred) a tuple of
    (bitcoin.getwork.BlockAttempt, block height).
    """
    # a block could arrive in between these two queries
    getwork_df, height_df = bitcoind.rpc_getwork(), bitcoind.rpc_getblocknumber()
    getwork, height = bitcoin.getwork.BlockAttempt.from_getwork((yield getwork_df)), (yield height_df)
    # get rid of residual errors
    # (attach no-op errbacks so Twisted doesn't log an unhandled-failure
    # warning if either deferred still carries an error)
    getwork_df.addErrback(lambda fail: None)
    height_df.addErrback(lambda fail: None)
    defer.returnValue((getwork, height))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over its p2p interface (checkorder) for a payout script.

    Returns (via Deferred) the script bytes on 'success', None on 'denied'
    (IP transactions disabled), and raises ValueError on any other reply.
    """
    res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
    if res['reply'] == 'success':
        defer.returnValue(res['script'])
    elif res['reply'] == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fall back to a payout script built from bitcoind's 'p2pool' account address.

    Asks bitcoind (JSON-RPC) for the account address, decodes it to a pubkey
    hash for the given network, and returns (via Deferred) the matching
    script2.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
58 @defer.inlineCallbacks
64 print 'p2pool (version %s)' % (p2pool_init.__version__,)
67 # connect to bitcoind over JSON-RPC and do initial getwork
68 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
69 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
70 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
71 good = yield args.net.BITCOIN_RPC_CHECK(bitcoind)
73 print " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
75 temp_work, temp_height = yield getwork(bitcoind)
77 print ' Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
80 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
81 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
82 factory = bitcoin.p2p.ClientFactory(args.net)
83 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
84 my_script = yield get_payout_script(factory)
85 if args.pubkey_hash is None:
87 print ' IP transaction denied ... falling back to sending to address.'
88 my_script = yield get_payout_script2(bitcoind, args.net)
90 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
92 print ' Payout script:', my_script.encode('hex')
95 print 'Loading cached block headers...'
96 ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
97 print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
100 tracker = p2pool.OkayTracker(args.net)
101 ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
102 known_verified = set()
103 print "Loading shares..."
104 for i, (mode, contents) in enumerate(ss.get_shares()):
106 if contents.hash in tracker.shares:
108 contents.shared = True
109 contents.stored = True
110 tracker.add(contents)
111 if len(tracker.shares) % 1000 == 0 and tracker.shares:
112 print " %i" % (len(tracker.shares),)
113 elif mode == 'verified_hash':
114 known_verified.add(contents)
116 raise AssertionError()
117 print " ...inserting %i verified shares..." % (len(known_verified),)
118 for h in known_verified:
119 if h not in tracker.shares:
121 tracker.verified.add(tracker.shares[h])
122 print " ...done loading %i shares!" % (len(tracker.shares),)
124 tracker.added.watch(ss.add_share)
125 tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
127 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
129 # information affecting work that should trigger a long-polling update
130 current_work = variable.Variable(None)
131 # information affecting work that should not trigger a long-polling update
132 current_work2 = variable.Variable(None)
134 work_updated = variable.Event()
136 requested = expiring_dict.ExpiringDict(300)
@defer.inlineCallbacks
def set_real_work1():
    # Poll bitcoind for fresh work and publish it into current_work (fields
    # that should trigger a long-poll) and current_work2 (fields that
    # shouldn't). NOTE(review): the dict literals below continue on lines
    # not visible here — confirm full field lists against the file.
    work, height = yield getwork(bitcoind)
    changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
    current_work.set(dict(
        version=work.version,
        previous_block=work.previous_block,
        best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
    current_work2.set(dict(
        clock_offset=time.time() - work.timestamp,
def set_real_work2():
    # Recompute the best known share from the tracker and request any
    # 'desired' parent shares that peers have advertised but we lack,
    # with per-hash exponential backoff recorded in `requested`.
    best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
    t = dict(current_work.value)
    t['best_share_hash'] = best
    # NOTE(review): 't' appears to be reused as a timestamp in the backoff
    # comparison below (reassigned on a line not visible here) — confirm.
    for peer2, share_hash in desired:
        if share_hash not in tracker.tails: # was received in the time tracker.think was running
        last_request_time, count = requested.get(share_hash, (None, 0))
        if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
        # gather peers that claim to know a head descending from this tail
        potential_peers = set()
        for head in tracker.tails[share_hash]:
            potential_peers.update(peer_heads.get(head, set()))
        potential_peers = [peer for peer in potential_peers if peer.connected2]
        if count == 0 and peer2 is not None and peer2.connected2:
        peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
        print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
        stops=list(set(tracker.heads) | set(
            tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
        requested[share_hash] = t, count + 1
190 print 'Initializing work...'
191 yield set_real_work1()
196 start_time = time.time() - current_work2.value['clock_offset']
198 # setup p2p logic and join p2pool network
def share_share(share, ignore_peer=None):
    # Broadcast `share` to every connected peer, optionally skipping
    # `ignore_peer` (the peer it came from) so it isn't echoed back.
    for peer in p2p_node.peers.itervalues():
        if peer is ignore_peer:
        #if p2pool_init.DEBUG:
        #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
        peer.send_shares([share])
def p2p_shares(shares, peer=None):
    # Handle a batch of shares received from the p2pool network (peer=None
    # for locally-generated ones): add new shares to the tracker, pass any
    # share that also solves a bitcoin block on to bitcoind, and remember
    # which peer knows about which chain head.
    print 'Processing %i shares...' % (len(shares),)
    if share.hash in tracker.shares:
        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
    #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
    #for peer2, share_hash in desired:
    #    print 'Requesting parent share %x' % (share_hash,)
    #    peer2.send_getshares(hashes=[share_hash], parents=2000)
    # a share whose hash also meets the bitcoin header target is a full block
    if share.bitcoin_hash <= share.header['target']:
        print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
        if factory.conn.value is not None:
            factory.conn.value.send_block(block=share.as_block(tracker, args.net))
        print 'No bitcoind connection! Erp!'
    if shares and peer is not None:
        peer_heads.setdefault(shares[0].hash, set()).add(peer)
    print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
def p2p_share_hashes(share_hashes, peer):
    # Handle a peer advertising share hashes: collect the ones we don't
    # already have (throttled per-hash via `requested`) and request them
    # back from that peer.
    for share_hash in share_hashes:
        if share_hash in tracker.shares:
        last_request_time, count = requested.get(share_hash, (None, 0))
        if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
        print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
        get_hashes.append(share_hash)
        requested[share_hash] = t, count + 1
    if share_hashes and peer is not None:
        # remember that this peer knows about this head
        peer_heads.setdefault(share_hashes[0], set()).add(peer)
    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
def p2p_get_shares(share_hashes, parents, stops, peer):
    # Serve a peer's getshares request: for each requested hash, send the
    # share plus up to `parents` ancestors, cutting off at any hash in
    # `stops`.
    parents = min(parents, 1000//len(share_hashes)) # cap total volume; NOTE(review): assumes share_hashes is never empty — confirm protocol guarantees this
    for share_hash in share_hashes:
        for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
            if share.hash in stops:
    print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
    peer.send_shares(shares, full=True)
275 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
279 ip, port = x.split(':')
282 return x, args.net.P2P_PORT
285 ('72.14.191.28', args.net.P2P_PORT),
286 ('62.204.197.159', args.net.P2P_PORT),
287 ('142.58.248.28', args.net.P2P_PORT),
288 ('94.23.34.145', args.net.P2P_PORT),
292 'dabuttonfactory.com',
295 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
297 log.err(None, 'Error resolving bootstrap node IP:')
300 current_work=current_work,
301 port=args.p2pool_port,
303 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
304 mode=0 if args.low_bandwidth else 1,
305 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
307 p2p_node.handle_shares = p2p_shares
308 p2p_node.handle_share_hashes = p2p_share_hashes
309 p2p_node.handle_get_shares = p2p_get_shares
# send share when the chain changes to their chain
def work_changed(new_work):
    # On a change of best_share_hash, walk the new best chain and
    # (re)broadcast its shares to peers, skipping the peer a share
    # originally came from.
    #print 'Work changed:', new_work
    for share in tracker.get_chain_known(new_work['best_share_hash']):
        share_share(share, share.peer)
current_work.changed.watch(work_changed)
325 @defer.inlineCallbacks
329 is_lan, lan_ip = yield ipdiscover.get_local_ip()
332 pm = yield portmapper.get_port_mapper()
333 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
335 if p2pool_init.DEBUG:
336 log.err(None, "UPnP error:")
337 yield deferral.sleep(random.expovariate(1/120))
342 # start listening for workers with a JSON-RPC server
344 print 'Listening for workers on port %i...' % (args.worker_port,)
348 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
349 run_identifier = struct.pack('<Q', random.randrange(2**64))
def compute(state, payout_script):
    # Build a getwork BlockAttempt for a miner: select good mempool
    # transactions, build the p2pool generate (coinbase) transaction, and
    # memoize the full transaction list by merkle root so got_response()
    # can reassemble the block when the miner submits solved work.
    if payout_script is None:
        payout_script = my_script
    if state['best_share_hash'] is None and args.net.PERSIST:
        raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
    if len(p2p_node.peers) == 0 and args.net.PERSIST:
        raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
    pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
    pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
    # greedily take transactions while staying under a 500kB size budget
    for tx in pre_extra_txs:
        this_size = len(bitcoin.data.tx_type.pack(tx.tx))
        if size + this_size > 500000:
    # XXX assuming generate_tx is smallish here..
    generate_tx = p2pool.generate_transaction(
        previous_share_hash=state['best_share_hash'],
        new_script=payout_script,
        subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs), # block reward (with halvings) plus collected fees, in satoshis
        nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)), # run_identifier prefix lets us recognize our own shares later
        block_target=state['target'],
    print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
    #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
    #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
    transactions = [generate_tx] + [tx.tx for tx in extra_txs]
    merkle_root = bitcoin.data.merkle_hash(transactions)
    merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
    timestamp = int(time.time() - current_work2.value['clock_offset'])
    if state['best_share_hash'] is not None:
        # enforce the share-chain rule that timestamps must exceed the
        # median of the previous 11 shares' timestamps
        timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
        if timestamp2 > timestamp:
            print 'Toff', timestamp2 - timestamp
            timestamp = timestamp2
    target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
    times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
    #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
    return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
def got_response(data):
    # Handle solved work submitted by a miner: decode the header, look up
    # the transactions memoized by compute() via the merkle root, submit
    # to bitcoind when the bitcoin target is met, and record the p2pool
    # share. Errors are logged rather than propagated to the miner.
    # match up with transactions
    header = bitcoin.getwork.decode_data(data)
    transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
    if transactions is None:
        print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
    block = dict(header=header, txs=transactions)
    hash_ = bitcoin.data.block_header_type.hash256(block['header'])
    if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
        if factory.conn.value is not None:
            factory.conn.value.send_block(block=block)
        print 'No bitcoind connection! Erp!'
        if hash_ <= block['header']['target']:
            print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
    # share-chain target is carried in the coinbase script of the generate tx
    target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
    print 'Received invalid share from worker - %x/%x' % (hash_, target)
    share = p2pool.Share.from_block(block)
    my_shares.add(share.hash)
    print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
    good = share.previous_hash == current_work.value['best_share_hash']
    # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
    # eg. good = share.hash == current_work.value['best_share_hash'] here
    log.err(None, 'Error processing data received from worker:')
435 web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
438 if current_work.value['best_share_hash'] is not None:
439 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
440 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
441 return json.dumps(att_s)
442 return json.dumps(None)
445 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
446 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
448 for script in sorted(weights, key=lambda s: weights[s]):
449 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
450 return json.dumps(res)
class WebInterface(resource.Resource):
    # Minimal twisted.web resource: serves the result of calling `func`
    # on each GET, with a fixed Content-Type.
    def __init__(self, func, mime_type):
        self.func, self.mime_type = func, mime_type
    def render_GET(self, request):
        request.setHeader('Content-Type', self.mime_type)
460 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
461 web_root.putChild('users', WebInterface(get_users, 'application/json'))
463 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
465 reactor.listenTCP(args.worker_port, server.Site(web_root))
472 tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
473 get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
def __init__(self, tx, seen_at_block):
    # Wrap a mempool transaction with the bookkeeping needed to decide
    # whether it is safe to include in a block.
    # NOTE(review): self.tx is read below but its assignment is not
    # visible in this view — confirm `self.tx = tx` happens here.
    self.hash = bitcoin.data.tx_type.hash256(tx)
    self.seen_at_block = seen_at_block
    # every hash this tx "mentions": its own hash plus each input's previous output
    self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
    #print '%x %r' % (seen_at_block, tx)
    #for mention in self.mentions:
    #    print '%x' % mention
    self.parents_all_in_blocks = False
    self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
    self._find_parents_in_blocks()
@defer.inlineCallbacks
def _find_parents_in_blocks(self):
    # Asynchronously fetch each input's source transaction to accumulate
    # value_in and decide whether all parents are already confirmed in
    # blocks (sets parents_all_in_blocks when they are).
    for tx_in in self.tx['tx_ins']:
        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
        self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
        #print raw_transaction
        if not raw_transaction['parent_blocks']:
    self.parents_all_in_blocks = True
506 if not self.parents_all_in_blocks:
512 @defer.inlineCallbacks
515 assert isinstance(tx_hash, (int, long))
516 #print 'REQUESTING', tx_hash
517 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
519 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
521 log.err(None, 'Error handling tx:')
522 # disable for now, for testing impact on stales
523 #factory.new_tx.watch(new_tx)
def new_block(block_hash):
    # A new bitcoin block arrived over p2p — trigger an immediate work refresh.
    work_updated.happened()
factory.new_block.watch(new_block)
529 print 'Started successfully!'
532 ht.updated.watch(set_real_work2)
534 @defer.inlineCallbacks
537 flag = work_updated.get_deferred()
539 yield set_real_work1()
542 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
544 @defer.inlineCallbacks
551 yield deferral.sleep(random.expovariate(1/20))
557 def watchdog_handler(signum, frame):
558 print "Watchdog timer went off at:"
559 traceback.print_exc()
561 signal.signal(signal.SIGALRM, watchdog_handler)
562 task.LoopingCall(signal.alarm, 30).start(1)
565 counter = skiplists.CountsSkipList(tracker, run_identifier)
568 yield deferral.sleep(3)
570 if current_work.value['best_share_hash'] is not None:
571 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
573 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
574 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
575 matching_in_chain = counter(current_work.value['best_share_hash'], height)
576 shares_in_chain = my_shares & matching_in_chain
577 stale_shares = my_shares - matching_in_chain
578 print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
581 len(tracker.verified.shares),
583 weights.get(my_script, 0)/total_weight*100,
584 math.format(weights.get(my_script, 0)/total_weight*att_s),
585 len(shares_in_chain) + len(stale_shares),
588 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
589 #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
590 #for k, v in weights.iteritems():
591 # print k.encode('hex'), v/total_weight
595 log.err(None, 'Fatal error:')
600 parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
601 parser.add_argument('--version', action='version', version=p2pool_init.__version__)
602 parser.add_argument('--namecoin',
603 help='use namecoin instead of bitcoin',
604 action='store_const', const=True, default=False, dest='namecoin')
605 parser.add_argument('--testnet',
606 help='use the testnet',
607 action='store_const', const=True, default=False, dest='testnet')
608 parser.add_argument('--debug',
609 help='debugging mode',
610 action='store_const', const=True, default=False, dest='debug')
611 parser.add_argument('-a', '--address',
612 help='generate to this address (defaults to requesting one from bitcoind)',
613 type=str, action='store', default=None, dest='address')
614 parser.add_argument('--charts',
615 help='generate charts on the web interface (requires PIL and pygame)',
616 action='store_const', const=True, default=False, dest='charts')
618 p2pool_group = parser.add_argument_group('p2pool interface')
619 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
620 help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
621 type=int, action='store', default=None, dest='p2pool_port')
622 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
623 help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
624 type=str, action='append', default=[], dest='p2pool_nodes')
625 parser.add_argument('-l', '--low-bandwidth',
626 help='trade lower bandwidth usage for higher latency (reduced efficiency)',
627 action='store_true', default=False, dest='low_bandwidth')
628 parser.add_argument('--disable-upnp',
629 help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
630 action='store_false', default=True, dest='upnp')
632 worker_group = parser.add_argument_group('worker interface')
633 worker_group.add_argument('-w', '--worker-port', metavar='PORT',
634 help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
635 type=int, action='store', default=9332, dest='worker_port')
637 bitcoind_group = parser.add_argument_group('bitcoind interface')
638 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
639 help='connect to a bitcoind at this address (default: 127.0.0.1)',
640 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
641 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
642 help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
643 type=int, action='store', default=8332, dest='bitcoind_rpc_port')
644 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
645 help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
646 type=int, action='store', default=None, dest='bitcoind_p2p_port')
648 bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
649 help='bitcoind RPC interface username',
650 type=str, action='store', dest='bitcoind_rpc_username')
651 bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
652 help='bitcoind RPC interface password',
653 type=str, action='store', dest='bitcoind_rpc_password')
655 args = parser.parse_args()
658 p2pool_init.DEBUG = True
class ReopeningFile(object):
    # File-like object that remembers its open() arguments so the
    # underlying file can be closed and reopened (used to rotate
    # debug.log on SIGUSR1). NOTE(review): some method headers of this
    # class are not visible in this view — confirm against the file.
    def __init__(self, *open_args, **open_kwargs):
        self.open_args, self.open_kwargs = open_args, open_kwargs
        self.inner_file = open(*self.open_args, **self.open_kwargs)
        self.inner_file.close()
        self.inner_file = open(*self.open_args, **self.open_kwargs)
    def write(self, data):
        self.inner_file.write(data)
        self.inner_file.flush()
class TeePipe(object):
    # File-like object that duplicates writes (and flushes) to several
    # underlying outputs, e.g. stderr plus the debug log file.
    def __init__(self, outputs):
        self.outputs = outputs
    def write(self, data):
        for output in self.outputs:
    for output in self.outputs:
class TimestampingPipe(object):
    # File-like wrapper that buffers partial lines and prefixes each
    # complete line with a wall-clock timestamp before writing it to
    # inner_file.
    def __init__(self, inner_file):
        self.inner_file = inner_file
    def write(self, data):
        buf = self.buf + data
        lines = buf.split('\n')
        for line in lines[:-1]:
            self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
            self.inner_file.flush()
693 logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
694 sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
695 if hasattr(signal, "SIGUSR1"):
696 def sigusr1(signum, frame):
697 print '''Caught SIGUSR1, closing 'debug.log'...'''
699 print '''...and reopened 'debug.log' after catching SIGUSR1.'''
700 signal.signal(signal.SIGUSR1, sigusr1)
703 (False, False): p2pool.Mainnet,
704 (False, True): p2pool.Testnet,
705 (True, False): p2pool.NamecoinMainnet,
706 (True, True): p2pool.NamecoinTestnet,
707 }[args.namecoin, args.testnet]
709 if args.bitcoind_p2p_port is None:
710 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
712 if args.p2pool_port is None:
713 args.p2pool_port = args.net.P2P_PORT
715 if args.address is not None:
717 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
719 raise ValueError('error parsing address: ' + repr(e))
721 args.pubkey_hash = None
723 reactor.callWhenRunning(main, args)