3 from __future__ import division
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current work unit and block height from bitcoind over JSON-RPC.

    Fires the getwork and getblocknumber RPCs in parallel and resolves (via
    defer.returnValue) to a (bitcoin.getwork.BlockAttempt, height) tuple.
    Failures are retried by the @deferral.retry decorator.
    """
    # a block could arrive in between these two queries
    getwork_df, height_df = bitcoind.rpc_getwork(), bitcoind.rpc_getblocknumber()
    try:
        getwork, height = bitcoin.getwork.BlockAttempt.from_getwork((yield getwork_df)), (yield height_df)
    finally:
        # get rid of residual errors - done in a finally so that it also runs
        # when the first yield raises; otherwise a later failure of height_df
        # would be left dangling as an unhandled Deferred error
        getwork_df.addErrback(lambda fail: None)
        height_df.addErrback(lambda fail: None)
    defer.returnValue((getwork, height))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Request a payout script from bitcoind via a null checkorder message.

    Resolves (via defer.returnValue) to the script on a 'success' reply and to
    None on a 'denied' reply; any other reply raises ValueError.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    # only reached when neither branch above returned
    raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build a payout script from an address requested from bitcoind over RPC.

    Asks bitcoind for the address of its 'p2pool' account, decodes it to a
    pubkey hash for the given network, and resolves (via defer.returnValue)
    to the corresponding script. Failures are retried by @deferral.retry.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
58 @defer.inlineCallbacks
64 print 'p2pool (version %s)' % (p2pool_init.__version__,)
67 # connect to bitcoind over JSON-RPC and do initial getwork
68 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
69 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
70 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
71 temp_work, temp_height = yield getwork(bitcoind)
73 print ' Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
76 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
77 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
78 factory = bitcoin.p2p.ClientFactory(args.net)
79 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
80 my_script = yield get_payout_script(factory)
81 if args.pubkey_hash is None:
83 print ' IP transaction denied ... falling back to sending to address.'
84 my_script = yield get_payout_script2(bitcoind, args.net)
86 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
88 print ' Payout script:', my_script.encode('hex')
91 print 'Loading cached block headers...'
92 ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
93 print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
96 tracker = p2pool.OkayTracker(args.net)
97 ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
98 known_verified = set()
99 print "Loading shares..."
100 for i, (mode, contents) in enumerate(ss.get_shares()):
102 if contents.hash in tracker.shares:
104 contents.shared = True
105 contents.stored = True
106 tracker.add(contents)
107 if len(tracker.shares) % 1000 == 0 and tracker.shares:
108 print " %i" % (len(tracker.shares),)
109 elif mode == 'verified_hash':
110 known_verified.add(contents)
112 raise AssertionError()
113 print " ...inserting %i verified shares..." % (len(known_verified),)
114 for h in known_verified:
115 if h not in tracker.shares:
117 tracker.verified.add(tracker.shares[h])
118 print " ...done loading %i shares!" % (len(tracker.shares),)
120 tracker.added.watch(ss.add_share)
121 tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
123 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
125 # information affecting work that should trigger a long-polling update
126 current_work = variable.Variable(None)
127 # information affecting work that should not trigger a long-polling update
128 current_work2 = variable.Variable(None)
130 work_updated = variable.Event()
132 requested = expiring_dict.ExpiringDict(300)
134 @defer.inlineCallbacks
135 def set_real_work1():
136 work, height = yield getwork(bitcoind)
137 changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
138 current_work.set(dict(
139 version=work.version,
140 previous_block=work.previous_block,
143 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
145 current_work2.set(dict(
146 clock_offset=time.time() - work.timestamp,
151 def set_real_work2():
152 best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
154 t = dict(current_work.value)
155 t['best_share_hash'] = best
159 for peer2, share_hash in desired:
160 if share_hash not in tracker.tails: # was received in the time tracker.think was running
162 last_request_time, count = requested.get(share_hash, (None, 0))
163 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
165 potential_peers = set()
166 for head in tracker.tails[share_hash]:
167 potential_peers.update(peer_heads.get(head, set()))
168 potential_peers = [peer for peer in potential_peers if peer.connected2]
169 if count == 0 and peer2 is not None and peer2.connected2:
172 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
176 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
180 stops=list(set(tracker.heads) | set(
181 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
184 requested[share_hash] = t, count + 1
186 print 'Initializing work...'
187 yield set_real_work1()
192 start_time = time.time() - current_work2.value['clock_offset']
194 # setup p2p logic and join p2pool network
196 def share_share(share, ignore_peer=None):
197 for peer in p2p_node.peers.itervalues():
198 if peer is ignore_peer:
200 #if p2pool_init.DEBUG:
201 # print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
202 peer.send_shares([share])
205 def p2p_shares(shares, peer=None):
207 print 'Processing %i shares...' % (len(shares),)
211 if share.hash in tracker.shares:
212 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
216 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
219 #for peer2, share_hash in desired:
220 # print 'Requesting parent share %x' % (share_hash,)
221 # peer2.send_getshares(hashes=[share_hash], parents=2000)
223 if share.bitcoin_hash <= share.header['target']:
225 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
227 if factory.conn.value is not None:
228 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
230 print 'No bitcoind connection! Erp!'
232 if shares and peer is not None:
233 peer_heads.setdefault(shares[0].hash, set()).add(peer)
239 print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
241 def p2p_share_hashes(share_hashes, peer):
244 for share_hash in share_hashes:
245 if share_hash in tracker.shares:
247 last_request_time, count = requested.get(share_hash, (None, 0))
248 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
250 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
251 get_hashes.append(share_hash)
252 requested[share_hash] = t, count + 1
254 if share_hashes and peer is not None:
255 peer_heads.setdefault(share_hashes[0], set()).add(peer)
257 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
259 def p2p_get_shares(share_hashes, parents, stops, peer):
260 parents = min(parents, 1000//len(share_hashes))
263 for share_hash in share_hashes:
264 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
265 if share.hash in stops:
268 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
269 peer.send_shares(shares, full=True)
271 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
275 ip, port = x.split(':')
278 return x, args.net.P2P_PORT
281 ('72.14.191.28', args.net.P2P_PORT),
282 ('62.204.197.159', args.net.P2P_PORT),
283 ('142.58.248.28', args.net.P2P_PORT),
284 ('94.23.34.145', args.net.P2P_PORT),
288 'dabuttonfactory.com',
291 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
293 log.err(None, 'Error resolving bootstrap node IP:')
296 current_work=current_work,
297 port=args.p2pool_port,
299 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
300 mode=0 if args.low_bandwidth else 1,
301 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
303 p2p_node.handle_shares = p2p_shares
304 p2p_node.handle_share_hashes = p2p_share_hashes
305 p2p_node.handle_get_shares = p2p_get_shares
309 # send share when the chain changes to their chain
310 def work_changed(new_work):
311 #print 'Work changed:', new_work
312 for share in tracker.get_chain_known(new_work['best_share_hash']):
315 share_share(share, share.peer)
316 current_work.changed.watch(work_changed)
321 @defer.inlineCallbacks
325 is_lan, lan_ip = yield ipdiscover.get_local_ip()
328 pm = yield portmapper.get_port_mapper()
329 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
331 if p2pool_init.DEBUG:
332 log.err(None, "UPnP error:")
333 yield deferral.sleep(random.expovariate(1/120))
338 # start listening for workers with a JSON-RPC server
340 print 'Listening for workers on port %i...' % (args.worker_port,)
344 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
345 run_identifier = struct.pack('<Q', random.randrange(2**64))
347 def compute(state, payout_script):
348 if payout_script is None:
349 payout_script = my_script
350 if state['best_share_hash'] is None and args.net.PERSIST:
351 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
352 if len(p2p_node.peers) == 0 and args.net.PERSIST:
353 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
354 pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
355 pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
358 for tx in pre_extra_txs:
359 this_size = len(bitcoin.data.tx_type.pack(tx.tx))
360 if size + this_size > 500000:
365 # XXX assuming generate_tx is smallish here..
366 generate_tx = p2pool.generate_transaction(
368 previous_share_hash=state['best_share_hash'],
369 new_script=payout_script,
370 subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
371 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
372 block_target=state['target'],
375 print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
376 #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
377 #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
378 transactions = [generate_tx] + [tx.tx for tx in extra_txs]
379 merkle_root = bitcoin.data.merkle_hash(transactions)
380 merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
382 timestamp = int(time.time() - current_work2.value['clock_offset'])
383 if state['best_share_hash'] is not None:
384 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
385 if timestamp2 > timestamp:
386 print 'Toff', timestamp2 - timestamp
387 timestamp = timestamp2
388 target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
389 times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
390 #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
391 return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
396 def got_response(data):
398 # match up with transactions
399 header = bitcoin.getwork.decode_data(data)
400 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
401 if transactions is None:
402 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
404 block = dict(header=header, txs=transactions)
405 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
406 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
407 if factory.conn.value is not None:
408 factory.conn.value.send_block(block=block)
410 print 'No bitcoind connection! Erp!'
411 if hash_ <= block['header']['target']:
413 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
415 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
417 print 'Received invalid share from worker - %x/%x' % (hash_, target)
419 share = p2pool.Share.from_block(block)
420 my_shares.add(share.hash)
421 print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
422 good = share.previous_hash == current_work.value['best_share_hash']
423 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
425 # eg. good = share.hash == current_work.value['best_share_hash'] here
428 log.err(None, 'Error processing data received from worker:')
431 web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
434 if current_work.value['best_share_hash'] is not None:
435 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
436 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
437 return json.dumps(att_s)
438 return json.dumps(None)
441 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
442 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
444 for script in sorted(weights, key=lambda s: weights[s]):
445 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
446 return json.dumps(res)
448 class WebInterface(resource.Resource):
449 def __init__(self, func, mime_type):
450 self.func, self.mime_type = func, mime_type
452 def render_GET(self, request):
453 request.setHeader('Content-Type', self.mime_type)
456 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
457 web_root.putChild('users', WebInterface(get_users, 'application/json'))
459 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
461 reactor.listenTCP(args.worker_port, server.Site(web_root))
468 tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
469 get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
472 def __init__(self, tx, seen_at_block):
473 self.hash = bitcoin.data.tx_type.hash256(tx)
475 self.seen_at_block = seen_at_block
476 self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
478 #print '%x %r' % (seen_at_block, tx)
479 #for mention in self.mentions:
480 # print '%x' % mention
482 self.parents_all_in_blocks = False
485 self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
486 self._find_parents_in_blocks()
488 @defer.inlineCallbacks
489 def _find_parents_in_blocks(self):
490 for tx_in in self.tx['tx_ins']:
492 raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
495 self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
496 #print raw_transaction
497 if not raw_transaction['parent_blocks']:
499 self.parents_all_in_blocks = True
502 if not self.parents_all_in_blocks:
508 @defer.inlineCallbacks
511 assert isinstance(tx_hash, (int, long))
512 #print 'REQUESTING', tx_hash
513 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
515 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
517 log.err(None, 'Error handling tx:')
518 # disable for now, for testing impact on stales
519 #factory.new_tx.watch(new_tx)
521 def new_block(block_hash):
522 work_updated.happened()
523 factory.new_block.watch(new_block)
525 print 'Started successfully!'
528 ht.updated.watch(set_real_work2)
530 @defer.inlineCallbacks
533 flag = work_updated.get_deferred()
535 yield set_real_work1()
538 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
540 @defer.inlineCallbacks
547 yield deferral.sleep(random.expovariate(1/20))
553 def watchdog_handler(signum, frame):
554 print "Watchdog timer went off at:"
555 traceback.print_exc()
557 signal.signal(signal.SIGALRM, watchdog_handler)
558 task.LoopingCall(signal.alarm, 30).start(1)
561 counter = skiplists.CountsSkipList(tracker, run_identifier)
564 yield deferral.sleep(3)
566 if current_work.value['best_share_hash'] is not None:
567 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
569 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
570 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
571 matching_in_chain = counter(current_work.value['best_share_hash'], height)
572 shares_in_chain = my_shares & matching_in_chain
573 stale_shares = my_shares - matching_in_chain
574 print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
577 len(tracker.verified.shares),
579 weights.get(my_script, 0)/total_weight*100,
580 math.format(weights.get(my_script, 0)/total_weight*att_s),
581 len(shares_in_chain) + len(stale_shares),
584 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
585 #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
586 #for k, v in weights.iteritems():
587 # print k.encode('hex'), v/total_weight
591 log.err(None, 'Fatal error:')
# Command-line interface: general options, then one group per subsystem
# (p2pool p2p interface, worker/miner interface, bitcoind interface).
parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
parser.add_argument('--version', action='version', version=p2pool_init.__version__)
parser.add_argument('--namecoin',
    help='use namecoin instead of bitcoin',
    action='store_const', const=True, default=False, dest='namecoin')
parser.add_argument('--testnet',
    help='use the testnet',
    action='store_const', const=True, default=False, dest='testnet')
parser.add_argument('--debug',
    help='debugging mode',
    action='store_const', const=True, default=False, dest='debug')
parser.add_argument('-a', '--address',
    help='generate to this address (defaults to requesting one from bitcoind)',
    type=str, action='store', default=None, dest='address')
parser.add_argument('--charts',
    help='generate charts on the web interface (requires PIL and pygame)',
    action='store_const', const=True, default=False, dest='charts')
# Options controlling this node's participation in the p2pool p2p network.
p2pool_group = parser.add_argument_group('p2pool interface')
p2pool_group.add_argument('--p2pool-port', metavar='PORT',
    help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
    type=int, action='store', default=None, dest='p2pool_port')
p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
    help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
    type=str, action='append', default=[], dest='p2pool_nodes')
parser.add_argument('-l', '--low-bandwidth',
    help='trade lower bandwidth usage for higher latency (reduced efficiency)',
    action='store_true', default=False, dest='low_bandwidth')
parser.add_argument('--disable-upnp',
    help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
    action='store_false', default=True, dest='upnp')
# Options for the local JSON-RPC interface that miners connect to.
worker_group = parser.add_argument_group('worker interface')
worker_group.add_argument('-w', '--worker-port', metavar='PORT',
    help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
    type=int, action='store', default=9332, dest='worker_port')
# Options for reaching the local bitcoind over both RPC and p2p.
bitcoind_group = parser.add_argument_group('bitcoind interface')
bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
    help='connect to a bitcoind at this address (default: 127.0.0.1)',
    type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
    help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
    type=int, action='store', default=8332, dest='bitcoind_rpc_port')
bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
    help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
    type=int, action='store', default=None, dest='bitcoind_p2p_port')
# Positional arguments: RPC credentials are required.
bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
    help='bitcoind RPC interface username',
    type=str, action='store', dest='bitcoind_rpc_username')
bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
    help='bitcoind RPC interface password',
    type=str, action='store', dest='bitcoind_rpc_password')
650 args = parser.parse_args()
653 p2pool_init.DEBUG = True
654 class ReopeningFile(object):
655 def __init__(self, *open_args, **open_kwargs):
656 self.open_args, self.open_kwargs = open_args, open_kwargs
657 self.inner_file = open(*self.open_args, **self.open_kwargs)
659 self.inner_file.close()
660 self.inner_file = open(*self.open_args, **self.open_kwargs)
661 def write(self, data):
662 self.inner_file.write(data)
664 self.inner_file.flush()
665 class TeePipe(object):
666 def __init__(self, outputs):
667 self.outputs = outputs
668 def write(self, data):
669 for output in self.outputs:
672 for output in self.outputs:
674 class TimestampingPipe(object):
675 def __init__(self, inner_file):
676 self.inner_file = inner_file
679 def write(self, data):
680 buf = self.buf + data
681 lines = buf.split('\n')
682 for line in lines[:-1]:
683 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
684 self.inner_file.flush()
688 logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
689 sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
690 if hasattr(signal, "SIGUSR1"):
691 def sigusr1(signum, frame):
692 print '''Caught SIGUSR1, closing 'debug.log'...'''
694 print '''...and reopened 'debug.log' after catching SIGUSR1.'''
695 signal.signal(signal.SIGUSR1, sigusr1)
698 (False, False): p2pool.Mainnet,
699 (False, True): p2pool.Testnet,
700 (True, False): p2pool.NamecoinMainnet,
701 (True, True): p2pool.NamecoinTestnet,
702 }[args.namecoin, args.testnet]
704 if args.bitcoind_p2p_port is None:
705 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
707 if args.p2pool_port is None:
708 args.p2pool_port = args.net.P2P_PORT
710 if args.address is not None:
712 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
714 raise ValueError('error parsing address: ' + repr(e))
716 args.pubkey_hash = None
718 reactor.callWhenRunning(main, args)