3 from __future__ import division
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current work unit and chain height from bitcoind over JSON-RPC.

    Fires with a (bitcoin.getwork.BlockAttempt, height) tuple. Both RPC calls
    are issued before waiting on either, so they run concurrently; note that a
    block could arrive in between the two queries.
    """
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
    height = yield height_df
    # attach no-op errbacks so any residual failure on the deferreds is not
    # reported as an unhandled error by twisted
    for df in (work_df, height_df):
        df.addErrback(lambda fail: None)
    defer.returnValue((work, height))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind, over the bitcoin p2p interface, for a payout script.

    Sends a checkorder message with the null order. Fires with the script on
    a 'success' reply, with None when the IP transaction is 'denied', and
    raises ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    # defer.returnValue raises, so each guard ends the generator
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: pay to the address of bitcoind's 'p2pool' account.

    Fires with a script2 built from the pubkey hash of the address returned by
    bitcoind's getaccountaddress RPC.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
58 @defer.inlineCallbacks
64 print 'p2pool (version %s)' % (p2pool_init.__version__,)
67 # connect to bitcoind over JSON-RPC and do initial getwork
68 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
69 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
70 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
71 temp_work, temp_height = yield getwork(bitcoind)
73 print ' Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
76 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
77 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
78 factory = bitcoin.p2p.ClientFactory(args.net)
79 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
80 my_script = yield get_payout_script(factory)
81 if args.pubkey_hash is None:
83 print ' IP transaction denied ... falling back to sending to address.'
84 my_script = yield get_payout_script2(bitcoind, args.net)
86 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
88 print ' Payout script:', my_script.encode('hex')
91 print 'Loading cached block headers...'
92 ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
93 print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
96 tracker = p2pool.OkayTracker(args.net)
97 ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
98 known_verified = set()
99 print "Loading shares..."
100 for i, (mode, contents) in enumerate(ss.get_shares()):
102 if contents.hash in tracker.shares:
104 contents.shared = True
105 contents.stored = True
106 tracker.add(contents)
107 if len(tracker.shares) % 1000 == 0 and tracker.shares:
108 print " %i" % (len(tracker.shares),)
109 elif mode == 'verified_hash':
110 known_verified.add(contents)
112 raise AssertionError()
113 print " ...inserting %i verified shares..." % (len(known_verified),)
114 for h in known_verified:
115 if h not in tracker.shares:
117 tracker.verified.add(tracker.shares[h])
118 print " ...done loading %i shares!" % (len(tracker.shares),)
120 tracker.added.watch(ss.add_share)
121 tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
123 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
125 # information affecting work that should trigger a long-polling update
126 current_work = variable.Variable(None)
127 # information affecting work that should not trigger a long-polling update
128 current_work2 = variable.Variable(None)
130 work_updated = variable.Event()
132 requested = expiring_dict.ExpiringDict(300)
134 @defer.inlineCallbacks
135 def set_real_work1():
136 work, height = yield getwork(bitcoind)
137 changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
138 current_work.set(dict(
139 version=work.version,
140 previous_block=work.previous_block,
143 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
145 current_work2.set(dict(
146 clock_offset=time.time() - work.timestamp,
151 def set_real_work2():
152 best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
154 t = dict(current_work.value)
155 t['best_share_hash'] = best
159 for peer2, share_hash in desired:
160 if share_hash not in tracker.tails: # was received in the time tracker.think was running
162 last_request_time, count = requested.get(share_hash, (None, 0))
163 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
165 potential_peers = set()
166 for head in tracker.tails[share_hash]:
167 potential_peers.update(peer_heads.get(head, set()))
168 potential_peers = [peer for peer in potential_peers if peer.connected2]
169 if count == 0 and peer2 is not None and peer2.connected2:
172 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
176 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
180 stops=list(set(tracker.heads) | set(
181 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
184 requested[share_hash] = t, count + 1
186 print 'Initializing work...'
187 yield set_real_work1()
192 start_time = time.time() - current_work2.value['clock_offset']
194 # setup p2p logic and join p2pool network
196 def share_share(share, ignore_peer=None):
197 for peer in p2p_node.peers.itervalues():
198 if peer is ignore_peer:
200 #if p2pool_init.DEBUG:
201 # print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
202 peer.send_shares([share])
205 def p2p_shares(shares, peer=None):
207 print 'Processing %i shares...' % (len(shares),)
211 if share.hash in tracker.shares:
212 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
216 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
219 #for peer2, share_hash in desired:
220 # print 'Requesting parent share %x' % (share_hash,)
221 # peer2.send_getshares(hashes=[share_hash], parents=2000)
223 if share.bitcoin_hash <= share.header['target']:
225 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
227 if factory.conn.value is not None:
228 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
230 print 'No bitcoind connection! Erp!'
232 if shares and peer is not None:
233 peer_heads.setdefault(shares[0].hash, set()).add(peer)
239 print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
241 def p2p_share_hashes(share_hashes, peer):
244 for share_hash in share_hashes:
245 if share_hash in tracker.shares:
247 last_request_time, count = requested.get(share_hash, (None, 0))
248 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
250 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
251 get_hashes.append(share_hash)
252 requested[share_hash] = t, count + 1
254 if share_hashes and peer is not None:
255 peer_heads.setdefault(share_hashes[0], set()).add(peer)
257 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
259 def p2p_get_shares(share_hashes, parents, stops, peer):
260 parents = min(parents, 1000//len(share_hashes))
263 for share_hash in share_hashes:
264 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
265 if share.hash in stops:
268 peer.send_shares(shares, full=True)
270 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
274 ip, port = x.split(':')
277 return x, args.net.P2P_PORT
280 ('72.14.191.28', args.net.P2P_PORT),
281 ('62.204.197.159', args.net.P2P_PORT),
282 ('142.58.248.28', args.net.P2P_PORT),
283 ('94.23.34.145', args.net.P2P_PORT),
287 'dabuttonfactory.com',
290 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
292 log.err(None, 'Error resolving bootstrap node IP:')
295 current_work=current_work,
296 port=args.p2pool_port,
298 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
299 mode=0 if args.low_bandwidth else 1,
300 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
302 p2p_node.handle_shares = p2p_shares
303 p2p_node.handle_share_hashes = p2p_share_hashes
304 p2p_node.handle_get_shares = p2p_get_shares
308 # send share when the chain changes to their chain
309 def work_changed(new_work):
310 #print 'Work changed:', new_work
311 for share in tracker.get_chain_known(new_work['best_share_hash']):
314 share_share(share, share.peer)
315 current_work.changed.watch(work_changed)
320 @defer.inlineCallbacks
324 is_lan, lan_ip = yield ipdiscover.get_local_ip()
327 pm = yield portmapper.get_port_mapper()
328 yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
330 if p2pool_init.DEBUG:
331 log.err(None, "UPnP error:")
332 yield deferral.sleep(random.expovariate(1/120))
337 # start listening for workers with a JSON-RPC server
339 print 'Listening for workers on port %i...' % (args.worker_port,)
343 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
344 run_identifier = struct.pack('<Q', random.randrange(2**64))
346 def compute(state, payout_script):
347 if payout_script is None:
348 payout_script = my_script
349 if state['best_share_hash'] is None and args.net.PERSIST:
350 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
351 pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
352 pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
355 for tx in pre_extra_txs:
356 this_size = len(bitcoin.data.tx_type.pack(tx.tx))
357 if size + this_size > 500000:
362 # XXX assuming generate_tx is smallish here..
363 generate_tx = p2pool.generate_transaction(
365 previous_share_hash=state['best_share_hash'],
366 new_script=payout_script,
367 subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
368 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
369 block_target=state['target'],
372 print 'New base for worker. Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
373 #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
374 #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
375 transactions = [generate_tx] + [tx.tx for tx in extra_txs]
376 merkle_root = bitcoin.data.merkle_hash(transactions)
377 merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
379 timestamp = int(time.time() - current_work2.value['clock_offset'])
380 if state['best_share_hash'] is not None:
381 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
382 if timestamp2 > timestamp:
383 print 'Toff', timestamp2 - timestamp
384 timestamp = timestamp2
385 target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
386 times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
387 #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
388 return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
393 def got_response(data):
395 # match up with transactions
396 header = bitcoin.getwork.decode_data(data)
397 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
398 if transactions is None:
399 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
401 block = dict(header=header, txs=transactions)
402 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
403 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
404 if factory.conn.value is not None:
405 factory.conn.value.send_block(block=block)
407 print 'No bitcoind connection! Erp!'
408 if hash_ <= block['header']['target']:
410 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
412 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
414 print 'Worker submitted share with hash > target:\nhash : %x\ntarget: %x' % (hash_, target)
416 share = p2pool.Share.from_block(block)
417 my_shares.add(share.hash)
418 print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
419 good = share.previous_hash == current_work.value['best_share_hash']
420 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
422 # eg. good = share.hash == current_work.value['best_share_hash'] here
425 log.err(None, 'Error processing data received from worker:')
428 web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
431 if current_work.value['best_share_hash'] is not None:
432 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
433 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
434 return json.dumps(att_s)
435 return json.dumps(None)
438 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
439 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
441 for script in sorted(weights, key=lambda s: weights[s]):
442 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
443 return json.dumps(res)
445 class WebInterface(resource.Resource):
446 def __init__(self, func, mime_type):
447 self.func, self.mime_type = func, mime_type
449 def render_GET(self, request):
450 request.setHeader('Content-Type', self.mime_type)
453 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
454 web_root.putChild('users', WebInterface(get_users, 'application/json'))
456 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
458 reactor.listenTCP(args.worker_port, server.Site(web_root))
465 tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
466 get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
469 def __init__(self, tx, seen_at_block):
470 self.hash = bitcoin.data.tx_type.hash256(tx)
472 self.seen_at_block = seen_at_block
473 self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
475 #print '%x %r' % (seen_at_block, tx)
476 #for mention in self.mentions:
477 # print '%x' % mention
479 self.parents_all_in_blocks = False
482 self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
483 self._find_parents_in_blocks()
485 @defer.inlineCallbacks
486 def _find_parents_in_blocks(self):
487 for tx_in in self.tx['tx_ins']:
489 raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
492 self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
493 #print raw_transaction
494 if not raw_transaction['parent_blocks']:
496 self.parents_all_in_blocks = True
499 if not self.parents_all_in_blocks:
505 @defer.inlineCallbacks
508 assert isinstance(tx_hash, (int, long))
509 #print 'REQUESTING', tx_hash
510 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
512 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
514 log.err(None, 'Error handling tx:')
515 # disable for now, for testing impact on stales
516 #factory.new_tx.watch(new_tx)
def new_block(block_hash):
    # A new bitcoin block makes the current work stale; fire the event so the
    # work-refresh loop re-polls bitcoind immediately. block_hash is unused.
    work_updated.happened()
520 factory.new_block.watch(new_block)
522 print 'Started successfully!'
525 ht.updated.watch(set_real_work2)
527 @defer.inlineCallbacks
530 flag = work_updated.get_deferred()
532 yield set_real_work1()
535 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
537 @defer.inlineCallbacks
544 yield deferral.sleep(random.expovariate(1/20))
550 def watchdog_handler(signum, frame):
551 print "Watchdog timer went off at:"
552 traceback.print_exc()
554 signal.signal(signal.SIGALRM, watchdog_handler)
555 task.LoopingCall(signal.alarm, 30).start(1)
558 counter = skiplists.CountsSkipList(tracker, run_identifier)
561 yield deferral.sleep(3)
563 if current_work.value['best_share_hash'] is not None:
564 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
566 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
567 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
568 matching_in_chain = counter(current_work.value['best_share_hash'], height)
569 shares_in_chain = my_shares & matching_in_chain
570 stale_shares = my_shares - matching_in_chain
571 print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
574 len(tracker.verified.shares),
576 weights.get(my_script, 0)/total_weight*100,
577 math.format(weights.get(my_script, 0)/total_weight*att_s),
578 len(shares_in_chain) + len(stale_shares),
581 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
582 #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
583 #for k, v in weights.iteritems():
584 # print k.encode('hex'), v/total_weight
588 log.err(None, 'Fatal error:')
# Command-line interface: general options first, then one argument group per
# subsystem (p2pool p2p network, worker/miner interface, bitcoind connection).
parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
parser.add_argument('--version', action='version', version=p2pool_init.__version__)
# network selection: const is the network-parameters class, not a bool
parser.add_argument('--testnet',
    help='use the testnet',
    action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
parser.add_argument('--debug',
    help='debugging mode',
    action='store_const', const=True, default=False, dest='debug')
parser.add_argument('-a', '--address',
    help='generate to this address (defaults to requesting one from bitcoind)',
    type=str, action='store', default=None, dest='address')
parser.add_argument('--charts',
    help='generate charts on the web interface (requires PIL and pygame)',
    action='store_const', const=True, default=False, dest='charts')

p2pool_group = parser.add_argument_group('p2pool interface')
# default None: resolved later to the network's standard port
p2pool_group.add_argument('--p2pool-port', metavar='PORT',
    help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
    type=int, action='store', default=None, dest='p2pool_port')
# action='append': option may be given multiple times
p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
    help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
    type=str, action='append', default=[], dest='p2pool_nodes')
parser.add_argument('-l', '--low-bandwidth',
    help='trade lower bandwidth usage for higher latency (reduced efficiency)',
    action='store_true', default=False, dest='low_bandwidth')
# store_false: flag disables UPnP, so args.upnp defaults to True
parser.add_argument('--disable-upnp',
    help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
    action='store_false', default=True, dest='upnp')

worker_group = parser.add_argument_group('worker interface')
worker_group.add_argument('-w', '--worker-port', metavar='PORT',
    help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
    type=int, action='store', default=9332, dest='worker_port')

bitcoind_group = parser.add_argument_group('bitcoind interface')
bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
    help='connect to a bitcoind at this address (default: 127.0.0.1)',
    type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
    help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
    type=int, action='store', default=8332, dest='bitcoind_rpc_port')
bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
    help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
    type=int, action='store', default=None, dest='bitcoind_p2p_port')
# no option string: these two are required positional arguments
bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
    help='bitcoind RPC interface username',
    type=str, action='store', dest='bitcoind_rpc_username')
bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
    help='bitcoind RPC interface password',
    type=str, action='store', dest='bitcoind_rpc_password')

args = parser.parse_args()
647 p2pool_init.DEBUG = True
648 class ReopeningFile(object):
649 def __init__(self, *open_args, **open_kwargs):
650 self.open_args, self.open_kwargs = open_args, open_kwargs
651 self.inner_file = open(*self.open_args, **self.open_kwargs)
653 self.inner_file.close()
654 self.inner_file = open(*self.open_args, **self.open_kwargs)
655 def write(self, data):
656 self.inner_file.write(data)
658 self.inner_file.flush()
659 class TeePipe(object):
660 def __init__(self, outputs):
661 self.outputs = outputs
662 def write(self, data):
663 for output in self.outputs:
666 for output in self.outputs:
668 class TimestampingPipe(object):
669 def __init__(self, inner_file):
670 self.inner_file = inner_file
673 def write(self, data):
674 buf = self.buf + data
675 lines = buf.split('\n')
676 for line in lines[:-1]:
677 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
678 self.inner_file.flush()
682 logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
683 sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
684 if hasattr(signal, "SIGUSR1"):
685 def sigusr1(signum, frame):
686 print '''Caught SIGUSR1, closing 'debug.log'...'''
688 print '''...and reopened 'debug.log' after catching SIGUSR1.'''
689 signal.signal(signal.SIGUSR1, sigusr1)
691 if args.bitcoind_p2p_port is None:
692 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
694 if args.p2pool_port is None:
695 args.p2pool_port = args.net.P2P_PORT
697 if args.address is not None:
699 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
701 raise ValueError('error parsing address: ' + repr(e))
703 args.pubkey_hash = None
705 reactor.callWhenRunning(main, args)