3 from __future__ import division
17 from twisted.internet import defer, reactor
18 from twisted.web import server, resource
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
22 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
23 from util import db, expiring_dict, jsonrpc, variable, deferral, math
24 from . import p2p, worker_interface, skiplists
25 import p2pool.data as p2pool
26 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current getwork and block height from bitcoind over JSON-RPC.

    Returns a deferred firing with (BlockAttempt, height). The two RPCs are
    issued concurrently; a block could arrive in between them, so the pair is
    only approximately consistent (the retry decorator re-runs on failure).
    """
    getwork_df, height_df = bitcoind.rpc_getwork(), bitcoind.rpc_getblocknumber()
    try:
        getwork, height = bitcoin.getwork.BlockAttempt.from_getwork((yield getwork_df)), (yield height_df)
    finally:
        # Silence residual errors on both deferreds. Doing this in a finally:
        # (instead of only after both yields succeed, as before) ensures that
        # if the first yield raises, a later failure of height_df is still
        # consumed and does not log "Unhandled error in Deferred".
        getwork_df.addErrback(lambda fail: None)
        height_df.addErrback(lambda fail: None)
    defer.returnValue((getwork, height))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over the p2p interface (checkorder) for a payout script.

    Fires with the script on 'success', None on 'denied', and raises
    ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    # defer.returnValue raises, so reaching this line means an unknown reply
    raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: pay to the address bitcoind holds for the
    'p2pool' account, fetched over JSON-RPC."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
57 @defer.inlineCallbacks
63 print 'p2pool (version %s)' % (p2pool_init.__version__,)
66 # connect to bitcoind over JSON-RPC and do initial getwork
67 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
68 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
69 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
70 temp_work, temp_height = yield getwork(bitcoind)
72 print ' Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
75 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
76 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
77 factory = bitcoin.p2p.ClientFactory(args.net)
78 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
79 my_script = yield get_payout_script(factory)
80 if args.pubkey_hash is None:
82 print ' IP transaction denied ... falling back to sending to address.'
83 my_script = yield get_payout_script2(bitcoind, args.net)
85 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
87 print ' Payout script:', my_script.encode('hex')
90 print 'Loading cached block headers...'
91 ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
92 print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
95 tracker = p2pool.OkayTracker(args.net)
96 ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
97 print "Loading shares..."
98 for i, share in enumerate(ss.get_shares()):
99 if share.hash in tracker.shares:
103 tracker.add(share, known_verified=True)
104 if len(tracker.shares) % 1000 == 0 and tracker.shares:
105 print " %i" % (len(tracker.shares),)
108 tracker.verified.added.watch(ss.add_share)
110 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
112 # information affecting work that should trigger a long-polling update
113 current_work = variable.Variable(None)
114 # information affecting work that should not trigger a long-polling update
115 current_work2 = variable.Variable(None)
117 work_updated = variable.Event()
119 requested = expiring_dict.ExpiringDict(300)
121 @defer.inlineCallbacks
122 def set_real_work1():
123 work, height = yield getwork(bitcoind)
124 changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
125 current_work.set(dict(
126 version=work.version,
127 previous_block=work.previous_block,
130 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
132 current_work2.set(dict(
133 clock_offset=time.time() - work.timestamp,
138 def set_real_work2():
139 best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
141 t = dict(current_work.value)
142 t['best_share_hash'] = best
146 for peer2, share_hash in desired:
147 if share_hash not in tracker.tails: # was received in the time tracker.think was running
149 last_request_time, count = requested.get(share_hash, (None, 0))
150 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
152 potential_peers = set()
153 for head in tracker.tails[share_hash]:
154 potential_peers.update(peer_heads.get(head, set()))
155 potential_peers = [peer for peer in potential_peers if peer.connected2]
156 if count == 0 and peer2 is not None and peer2.connected2:
159 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
163 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
167 stops=list(set(tracker.heads) | set(
168 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
171 requested[share_hash] = t, count + 1
173 print 'Initializing work...'
174 yield set_real_work1()
179 start_time = time.time() - current_work2.value['clock_offset']
181 # setup p2p logic and join p2pool network
183 def share_share(share, ignore_peer=None):
184 for peer in p2p_node.peers.itervalues():
185 if peer is ignore_peer:
187 #if p2pool_init.DEBUG:
188 # print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
189 peer.send_shares([share])
192 def p2p_shares(shares, peer=None):
194 print 'Processing %i shares...' % (len(shares),)
198 if share.hash in tracker.shares:
199 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
203 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
206 #for peer2, share_hash in desired:
207 # print 'Requesting parent share %x' % (share_hash,)
208 # peer2.send_getshares(hashes=[share_hash], parents=2000)
210 if share.bitcoin_hash <= share.header['target']:
212 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
214 if factory.conn.value is not None:
215 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
217 print 'No bitcoind connection! Erp!'
219 if shares and peer is not None:
220 peer_heads.setdefault(shares[0].hash, set()).add(peer)
226 print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
228 def p2p_share_hashes(share_hashes, peer):
231 for share_hash in share_hashes:
232 if share_hash in tracker.shares:
234 last_request_time, count = requested.get(share_hash, (None, 0))
235 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
237 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
238 get_hashes.append(share_hash)
239 requested[share_hash] = t, count + 1
241 if share_hashes and peer is not None:
242 peer_heads.setdefault(share_hashes[0], set()).add(peer)
244 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
246 def p2p_get_shares(share_hashes, parents, stops, peer):
247 parents = min(parents, 1000//len(share_hashes))
250 for share_hash in share_hashes:
251 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
252 if share.hash in stops:
255 peer.send_shares(shares, full=True)
257 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
261 ip, port = x.split(':')
264 return x, args.net.P2P_PORT
267 ('72.14.191.28', args.net.P2P_PORT),
268 ('62.204.197.159', args.net.P2P_PORT),
269 ('142.58.248.28', args.net.P2P_PORT),
270 ('94.23.34.145', args.net.P2P_PORT),
274 'dabuttonfactory.com',
277 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
279 log.err(None, 'Error resolving bootstrap node IP:')
282 current_work=current_work,
283 port=args.p2pool_port,
285 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
286 mode=0 if args.low_bandwidth else 1,
287 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
289 p2p_node.handle_shares = p2p_shares
290 p2p_node.handle_share_hashes = p2p_share_hashes
291 p2p_node.handle_get_shares = p2p_get_shares
295 # send share when the chain changes to their chain
296 def work_changed(new_work):
297 #print 'Work changed:', new_work
298 for share in tracker.get_chain_known(new_work['best_share_hash']):
301 share_share(share, share.peer)
302 current_work.changed.watch(work_changed)
307 @defer.inlineCallbacks
311 is_lan, lan_ip = yield ipdiscover.get_local_ip()
314 pm = yield portmapper.get_port_mapper()
315 yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
317 if p2pool_init.DEBUG:
318 log.err(None, "UPnP error:")
319 yield deferral.sleep(random.expovariate(1/120))
324 # start listening for workers with a JSON-RPC server
326 print 'Listening for workers on port %i...' % (args.worker_port,)
330 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
331 run_identifier = struct.pack('<Q', random.randrange(2**64))
333 def compute(state, payout_script):
334 if payout_script is None:
335 payout_script = my_script
336 if state['best_share_hash'] is None and args.net.PERSIST:
337 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
338 pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
339 pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
342 for tx in pre_extra_txs:
343 this_size = len(bitcoin.data.tx_type.pack(tx.tx))
344 if size + this_size > 500000:
349 # XXX assuming generate_tx is smallish here..
350 generate_tx = p2pool.generate_transaction(
352 previous_share_hash=state['best_share_hash'],
353 new_script=payout_script,
354 subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
355 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
356 block_target=state['target'],
359 print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
360 #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
361 #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
362 transactions = [generate_tx] + [tx.tx for tx in extra_txs]
363 merkle_root = bitcoin.data.merkle_hash(transactions)
364 merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
366 timestamp = int(time.time() - current_work2.value['clock_offset'])
367 if state['best_share_hash'] is not None:
368 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
369 if timestamp2 > timestamp:
370 print 'Toff', timestamp2 - timestamp
371 timestamp = timestamp2
372 target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
373 times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
374 #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
375 return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
380 def got_response(data):
382 # match up with transactions
383 header = bitcoin.getwork.decode_data(data)
384 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
385 if transactions is None:
386 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
388 block = dict(header=header, txs=transactions)
389 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
390 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
391 if factory.conn.value is not None:
392 factory.conn.value.send_block(block=block)
394 print 'No bitcoind connection! Erp!'
395 if hash_ <= block['header']['target']:
397 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
399 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
401 print 'Received invalid share from worker - %x/%x' % (hash_, target)
403 share = p2pool.Share.from_block(block)
404 my_shares.add(share.hash)
405 print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
406 good = share.previous_hash == current_work.value['best_share_hash']
407 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
409 # eg. good = share.hash == current_work.value['best_share_hash'] here
412 log.err(None, 'Error processing data received from worker:')
415 web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
418 if current_work.value['best_share_hash'] is not None:
419 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
420 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
421 return json.dumps(att_s)
422 return json.dumps(None)
425 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
426 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
428 for script in sorted(weights, key=lambda s: weights[s]):
429 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
430 return json.dumps(res)
432 class WebInterface(resource.Resource):
433 def __init__(self, func, mime_type):
434 self.func, self.mime_type = func, mime_type
436 def render_GET(self, request):
437 request.setHeader('Content-Type', self.mime_type)
440 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
441 web_root.putChild('users', WebInterface(get_users, 'application/json'))
443 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
445 reactor.listenTCP(args.worker_port, server.Site(web_root))
452 tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
453 get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
456 def __init__(self, tx, seen_at_block):
457 self.hash = bitcoin.data.tx_type.hash256(tx)
459 self.seen_at_block = seen_at_block
460 self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
462 #print '%x %r' % (seen_at_block, tx)
463 #for mention in self.mentions:
464 # print '%x' % mention
466 self.parents_all_in_blocks = False
469 self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
470 self._find_parents_in_blocks()
472 @defer.inlineCallbacks
473 def _find_parents_in_blocks(self):
474 for tx_in in self.tx['tx_ins']:
476 raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
479 self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
480 #print raw_transaction
481 if not raw_transaction['parent_blocks']:
483 self.parents_all_in_blocks = True
486 if not self.parents_all_in_blocks:
492 @defer.inlineCallbacks
495 assert isinstance(tx_hash, (int, long))
496 #print 'REQUESTING', tx_hash
497 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
499 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
501 log.err(None, 'Error handling tx:')
502 # disable for now, for testing impact on stales
503 #factory.new_tx.watch(new_tx)
505 def new_block(block_hash):
506 work_updated.happened()
507 factory.new_block.watch(new_block)
509 print 'Started successfully!'
512 ht.updated.watch(set_real_work2)
514 @defer.inlineCallbacks
517 flag = work_updated.get_deferred()
519 yield set_real_work1()
522 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
524 @defer.inlineCallbacks
531 yield deferral.sleep(random.expovariate(1/20))
536 counter = skiplists.CountsSkipList(tracker, run_identifier)
539 yield deferral.sleep(3)
541 if current_work.value['best_share_hash'] is not None:
542 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
544 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
545 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
546 matching_in_chain = counter(current_work.value['best_share_hash'], height)
547 shares_in_chain = my_shares & matching_in_chain
548 stale_shares = my_shares - matching_in_chain
549 print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
552 len(tracker.verified.shares),
554 weights.get(my_script, 0)/total_weight*100,
555 math.format(weights.get(my_script, 0)/total_weight*att_s),
556 len(shares_in_chain) + len(stale_shares),
559 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
560 #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
561 #for k, v in weights.iteritems():
562 # print k.encode('hex'), v/total_weight
566 log.err(None, 'Fatal error:')
# --- command-line interface ---
# NOTE(review): the leading integers on these lines look like line-number
# artifacts from a paste/extraction, not program text — confirm against the
# pristine file before running.
570 parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
571 parser.add_argument('--version', action='version', version=p2pool_init.__version__)
# network selection: --testnet swaps the whole parameter set (ports, chain
# constants) by storing the Testnet class into args.net
572 parser.add_argument('--testnet',
573 help='use the testnet',
574 action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
575 parser.add_argument('--debug',
576 help='debugging mode',
577 action='store_const', const=True, default=False, dest='debug')
# payout destination; None means "ask bitcoind for an address" later
578 parser.add_argument('-a', '--address',
579 help='generate to this address (defaults to requesting one from bitcoind)',
580 type=str, action='store', default=None, dest='address')
581 parser.add_argument('--charts',
582 help='generate charts on the web interface (requires PIL and pygame)',
583 action='store_const', const=True, default=False, dest='charts')
# options for the p2pool peer-to-peer share network
585 p2pool_group = parser.add_argument_group('p2pool interface')
586 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
587 help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
588 type=int, action='store', default=None, dest='p2pool_port')
589 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
590 help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
591 type=str, action='append', default=[], dest='p2pool_nodes')
592 parser.add_argument('-l', '--low-bandwidth',
593 help='trade lower bandwidth usage for higher latency (reduced efficiency)',
594 action='store_true', default=False, dest='low_bandwidth')
595 parser.add_argument('--disable-upnp',
596 help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
597 action='store_false', default=True, dest='upnp')
# options for the local miner-facing JSON-RPC (getwork) server
599 worker_group = parser.add_argument_group('worker interface')
600 worker_group.add_argument('-w', '--worker-port', metavar='PORT',
601 help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
602 type=int, action='store', default=9332, dest='worker_port')
# options for reaching the upstream bitcoind (both RPC and p2p interfaces)
604 bitcoind_group = parser.add_argument_group('bitcoind interface')
605 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
606 help='connect to a bitcoind at this address (default: 127.0.0.1)',
607 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
608 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
609 help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
610 type=int, action='store', default=8332, dest='bitcoind_rpc_port')
611 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
612 help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
613 type=int, action='store', default=None, dest='bitcoind_p2p_port')
# positional (required) RPC credentials — note: no metavar-less flag, these
# are consumed in order: USERNAME then PASSWORD
615 bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
616 help='bitcoind RPC interface username',
617 type=str, action='store', dest='bitcoind_rpc_username')
618 bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
619 help='bitcoind RPC interface password',
620 type=str, action='store', dest='bitcoind_rpc_password')
622 args = parser.parse_args()
625 p2pool_init.DEBUG = True
626 class ReopeningFile(object):
627 def __init__(self, *open_args, **open_kwargs):
628 self.open_args, self.open_kwargs = open_args, open_kwargs
629 self.inner_file = open(*self.open_args, **self.open_kwargs)
631 self.inner_file.close()
632 self.inner_file = open(*self.open_args, **self.open_kwargs)
633 def write(self, data):
634 self.inner_file.write(data)
636 self.inner_file.flush()
637 class TeePipe(object):
638 def __init__(self, outputs):
639 self.outputs = outputs
640 def write(self, data):
641 for output in self.outputs:
644 for output in self.outputs:
646 class TimestampingPipe(object):
647 def __init__(self, inner_file):
648 self.inner_file = inner_file
651 def write(self, data):
652 buf = self.buf + data
653 lines = buf.split('\n')
654 for line in lines[:-1]:
655 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
656 self.inner_file.flush()
660 logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
661 sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
662 if hasattr(signal, "SIGUSR1"):
663 def sigusr1(signum, frame):
664 print '''Caught SIGUSR1, closing 'debug.log'...'''
666 print '''...and reopened 'debug.log' after catching SIGUSR1.'''
667 signal.signal(signal.SIGUSR1, sigusr1)
669 if args.bitcoind_p2p_port is None:
670 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
672 if args.p2pool_port is None:
673 args.p2pool_port = args.net.P2P_PORT
675 if args.address is not None:
677 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
679 raise ValueError('error parsing address: ' + repr(e))
681 args.pubkey_hash = None
683 reactor.callWhenRunning(main, args)