3 from __future__ import division
17 from twisted.internet import defer, reactor
18 from twisted.web import server, resource
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
22 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
23 from util import db, expiring_dict, jsonrpc, variable, deferral, math
24 from . import p2p, worker_interface, skiplists
25 import p2pool.data as p2pool
26 import p2pool as p2pool_init
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch the current block attempt and chain height from bitcoind.

    Fires both RPC calls concurrently and yields them in order; returns
    (BlockAttempt, height). Retried up to 3 times by the decorator.
    """
    # Start both requests before waiting on either — note a block could
    # still arrive in between the two queries on bitcoind's side.
    work_df = bitcoind.rpc_getwork()
    height_df = bitcoind.rpc_getblocknumber()
    block_attempt = bitcoin.getwork.BlockAttempt.from_getwork((yield work_df))
    height = yield height_df
    # Attach no-op errbacks so residual failures on the already-consumed
    # deferreds don't produce unhandled-error log noise.
    work_df.addErrback(lambda fail: None)
    height_df.addErrback(lambda fail: None)
    defer.returnValue((block_attempt, height))
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Request a payout script from bitcoind via a p2p checkorder message.

    Returns the script on 'success', None on 'denied' (IP transaction
    refused), and raises ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    # defer.returnValue raises, so these branches terminate the generator.
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build a payout script from the bitcoind 'p2pool' account address.

    Fallback used when the checkorder path (get_payout_script) is denied.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
57 @defer.inlineCallbacks
63 print 'p2pool (version %s)' % (p2pool_init.__version__,)
66 # connect to bitcoind over JSON-RPC and do initial getwork
67 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
68 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
69 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
70 temp_work, temp_height = yield getwork(bitcoind)
72 print ' Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
75 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
76 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
77 factory = bitcoin.p2p.ClientFactory(args.net)
78 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
79 my_script = yield get_payout_script(factory)
80 if args.pubkey_hash is None:
82 print ' IP transaction denied ... falling back to sending to address.'
83 my_script = yield get_payout_script2(bitcoind, args.net)
85 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
87 print ' Payout script:', my_script.encode('hex')
90 print 'Loading cached block headers...'
91 ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
92 print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
95 tracker = p2pool.OkayTracker(args.net)
96 ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
97 known_verified = set()
98 print "Loading shares..."
99 for i, (mode, contents) in enumerate(ss.get_shares()):
101 if contents.hash in tracker.shares:
103 contents.shared = True
104 contents.stored = True
105 tracker.add(contents)
106 if len(tracker.shares) % 1000 == 0 and tracker.shares:
107 print " %i" % (len(tracker.shares),)
108 elif mode == 'verified_hash':
109 known_verified.add(contents)
111 raise AssertionError()
112 print " ...inserting %i verified shares..." % (len(known_verified),)
113 for h in known_verified:
114 if h not in tracker.shares:
116 tracker.verified.add(tracker.shares[h])
117 print " ...done loading %i shares!" % (len(tracker.shares),)
119 tracker.added.watch(ss.add_share)
120 tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
122 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
124 # information affecting work that should trigger a long-polling update
125 current_work = variable.Variable(None)
126 # information affecting work that should not trigger a long-polling update
127 current_work2 = variable.Variable(None)
129 work_updated = variable.Event()
131 requested = expiring_dict.ExpiringDict(300)
133 @defer.inlineCallbacks
134 def set_real_work1():
135 work, height = yield getwork(bitcoind)
136 changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
137 current_work.set(dict(
138 version=work.version,
139 previous_block=work.previous_block,
142 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
144 current_work2.set(dict(
145 clock_offset=time.time() - work.timestamp,
150 def set_real_work2():
151 best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
153 t = dict(current_work.value)
154 t['best_share_hash'] = best
158 for peer2, share_hash in desired:
159 if share_hash not in tracker.tails: # was received in the time tracker.think was running
161 last_request_time, count = requested.get(share_hash, (None, 0))
162 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
164 potential_peers = set()
165 for head in tracker.tails[share_hash]:
166 potential_peers.update(peer_heads.get(head, set()))
167 potential_peers = [peer for peer in potential_peers if peer.connected2]
168 if count == 0 and peer2 is not None and peer2.connected2:
171 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
175 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
179 stops=list(set(tracker.heads) | set(
180 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
183 requested[share_hash] = t, count + 1
185 print 'Initializing work...'
186 yield set_real_work1()
191 start_time = time.time() - current_work2.value['clock_offset']
193 # setup p2p logic and join p2pool network
195 def share_share(share, ignore_peer=None):
196 for peer in p2p_node.peers.itervalues():
197 if peer is ignore_peer:
199 #if p2pool_init.DEBUG:
200 # print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
201 peer.send_shares([share])
204 def p2p_shares(shares, peer=None):
206 print 'Processing %i shares...' % (len(shares),)
210 if share.hash in tracker.shares:
211 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
215 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
218 #for peer2, share_hash in desired:
219 # print 'Requesting parent share %x' % (share_hash,)
220 # peer2.send_getshares(hashes=[share_hash], parents=2000)
222 if share.bitcoin_hash <= share.header['target']:
224 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
226 if factory.conn.value is not None:
227 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
229 print 'No bitcoind connection! Erp!'
231 if shares and peer is not None:
232 peer_heads.setdefault(shares[0].hash, set()).add(peer)
238 print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
240 def p2p_share_hashes(share_hashes, peer):
243 for share_hash in share_hashes:
244 if share_hash in tracker.shares:
246 last_request_time, count = requested.get(share_hash, (None, 0))
247 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
249 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
250 get_hashes.append(share_hash)
251 requested[share_hash] = t, count + 1
253 if share_hashes and peer is not None:
254 peer_heads.setdefault(share_hashes[0], set()).add(peer)
256 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
258 def p2p_get_shares(share_hashes, parents, stops, peer):
259 parents = min(parents, 1000//len(share_hashes))
262 for share_hash in share_hashes:
263 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
264 if share.hash in stops:
267 peer.send_shares(shares, full=True)
269 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
273 ip, port = x.split(':')
276 return x, args.net.P2P_PORT
279 ('72.14.191.28', args.net.P2P_PORT),
280 ('62.204.197.159', args.net.P2P_PORT),
281 ('142.58.248.28', args.net.P2P_PORT),
282 ('94.23.34.145', args.net.P2P_PORT),
286 'dabuttonfactory.com',
289 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
291 log.err(None, 'Error resolving bootstrap node IP:')
294 current_work=current_work,
295 port=args.p2pool_port,
297 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
298 mode=0 if args.low_bandwidth else 1,
299 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
301 p2p_node.handle_shares = p2p_shares
302 p2p_node.handle_share_hashes = p2p_share_hashes
303 p2p_node.handle_get_shares = p2p_get_shares
307 # send share when the chain changes to their chain
308 def work_changed(new_work):
309 #print 'Work changed:', new_work
310 for share in tracker.get_chain_known(new_work['best_share_hash']):
313 share_share(share, share.peer)
314 current_work.changed.watch(work_changed)
319 @defer.inlineCallbacks
323 is_lan, lan_ip = yield ipdiscover.get_local_ip()
326 pm = yield portmapper.get_port_mapper()
327 yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
329 if p2pool_init.DEBUG:
330 log.err(None, "UPnP error:")
331 yield deferral.sleep(random.expovariate(1/120))
336 # start listening for workers with a JSON-RPC server
338 print 'Listening for workers on port %i...' % (args.worker_port,)
342 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
343 run_identifier = struct.pack('<Q', random.randrange(2**64))
345 def compute(state, payout_script):
346 if payout_script is None:
347 payout_script = my_script
348 if state['best_share_hash'] is None and args.net.PERSIST:
349 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
350 pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
351 pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
354 for tx in pre_extra_txs:
355 this_size = len(bitcoin.data.tx_type.pack(tx.tx))
356 if size + this_size > 500000:
361 # XXX assuming generate_tx is smallish here..
362 generate_tx = p2pool.generate_transaction(
364 previous_share_hash=state['best_share_hash'],
365 new_script=payout_script,
366 subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
367 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
368 block_target=state['target'],
371 print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
372 #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
373 #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
374 transactions = [generate_tx] + [tx.tx for tx in extra_txs]
375 merkle_root = bitcoin.data.merkle_hash(transactions)
376 merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
378 timestamp = int(time.time() - current_work2.value['clock_offset'])
379 if state['best_share_hash'] is not None:
380 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
381 if timestamp2 > timestamp:
382 print 'Toff', timestamp2 - timestamp
383 timestamp = timestamp2
384 target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
385 times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
386 #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
387 return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
392 def got_response(data):
394 # match up with transactions
395 header = bitcoin.getwork.decode_data(data)
396 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
397 if transactions is None:
398 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
400 block = dict(header=header, txs=transactions)
401 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
402 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
403 if factory.conn.value is not None:
404 factory.conn.value.send_block(block=block)
406 print 'No bitcoind connection! Erp!'
407 if hash_ <= block['header']['target']:
409 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
411 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
413 print 'Worker submitted share with hash (%x) > target (%x)' % (hash_, target)
415 share = p2pool.Share.from_block(block)
416 my_shares.add(share.hash)
417 print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
418 good = share.previous_hash == current_work.value['best_share_hash']
419 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
421 # eg. good = share.hash == current_work.value['best_share_hash'] here
424 log.err(None, 'Error processing data received from worker:')
427 web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
430 if current_work.value['best_share_hash'] is not None:
431 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
432 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
433 return json.dumps(att_s)
434 return json.dumps(None)
437 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
438 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
440 for script in sorted(weights, key=lambda s: weights[s]):
441 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
442 return json.dumps(res)
# Minimal twisted.web resource serving the result of a callable with a
# fixed Content-Type (used for the /rate, /users and /chain_img endpoints).
444 class WebInterface(resource.Resource):
445     def __init__(self, func, mime_type):
            # func: callable producing the response body (the chain_img
            # endpoint passes a zero-argument lambda); mime_type: value for
            # the Content-Type header on every GET response.
446         self.func, self.mime_type = func, mime_type
448     def render_GET(self, request):
449         request.setHeader('Content-Type', self.mime_type)
            # NOTE(review): the statement that produces/returns the response
            # body is not visible in this chunk (original line 450 missing);
            # presumably it invokes self.func — confirm against the full file.
452 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
453 web_root.putChild('users', WebInterface(get_users, 'application/json'))
455 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
457 reactor.listenTCP(args.worker_port, server.Site(web_root))
464 tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
465 get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
468 def __init__(self, tx, seen_at_block):
469 self.hash = bitcoin.data.tx_type.hash256(tx)
471 self.seen_at_block = seen_at_block
472 self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
474 #print '%x %r' % (seen_at_block, tx)
475 #for mention in self.mentions:
476 # print '%x' % mention
478 self.parents_all_in_blocks = False
481 self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
482 self._find_parents_in_blocks()
484 @defer.inlineCallbacks
485 def _find_parents_in_blocks(self):
486 for tx_in in self.tx['tx_ins']:
488 raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
491 self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
492 #print raw_transaction
493 if not raw_transaction['parent_blocks']:
495 self.parents_all_in_blocks = True
498 if not self.parents_all_in_blocks:
504 @defer.inlineCallbacks
507 assert isinstance(tx_hash, (int, long))
508 #print 'REQUESTING', tx_hash
509 tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
511 tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
513 log.err(None, 'Error handling tx:')
514 # disable for now, for testing impact on stales
515 #factory.new_tx.watch(new_tx)
    # Callback wired to factory.new_block: when bitcoind announces a new
    # block, fire the work_updated Event so pending get_deferred() waiters
    # re-run the work-refresh path. The block_hash argument is unused here.
517     def new_block(block_hash):
518         work_updated.happened()
519 factory.new_block.watch(new_block)
521 print 'Started successfully!'
524 ht.updated.watch(set_real_work2)
526 @defer.inlineCallbacks
529 flag = work_updated.get_deferred()
531 yield set_real_work1()
534 yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
536 @defer.inlineCallbacks
543 yield deferral.sleep(random.expovariate(1/20))
548 counter = skiplists.CountsSkipList(tracker, run_identifier)
551 yield deferral.sleep(3)
553 if current_work.value['best_share_hash'] is not None:
554 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
556 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
557 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
558 matching_in_chain = counter(current_work.value['best_share_hash'], height)
559 shares_in_chain = my_shares & matching_in_chain
560 stale_shares = my_shares - matching_in_chain
561 print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
564 len(tracker.verified.shares),
566 weights.get(my_script, 0)/total_weight*100,
567 math.format(weights.get(my_script, 0)/total_weight*att_s),
568 len(shares_in_chain) + len(stale_shares),
571 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
572 #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
573 #for k, v in weights.iteritems():
574 # print k.encode('hex'), v/total_weight
578 log.err(None, 'Fatal error:')
# Command-line interface definition for the p2pool node. Defaults that
# depend on the selected network (ports) are left as None here and filled
# in after parsing from args.net (see the post-parse fixups further down).
582 parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
583 parser.add_argument('--version', action='version', version=p2pool_init.__version__)
# --testnet swaps the whole network-constants object, not just ports.
584 parser.add_argument('--testnet',
585     help='use the testnet',
586     action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
587 parser.add_argument('--debug',
588     help='debugging mode',
589     action='store_const', const=True, default=False, dest='debug')
590 parser.add_argument('-a', '--address',
591     help='generate to this address (defaults to requesting one from bitcoind)',
592     type=str, action='store', default=None, dest='address')
593 parser.add_argument('--charts',
594     help='generate charts on the web interface (requires PIL and pygame)',
595     action='store_const', const=True, default=False, dest='charts')
# --- p2pool peer-to-peer interface options ---
597 p2pool_group = parser.add_argument_group('p2pool interface')
598 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
599     help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
600     type=int, action='store', default=None, dest='p2pool_port')
# -n may be given multiple times (action='append') to seed several peers.
601 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
602     help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
603     type=str, action='append', default=[], dest='p2pool_nodes')
604 parser.add_argument('-l', '--low-bandwidth',
605     help='trade lower bandwidth usage for higher latency (reduced efficiency)',
606     action='store_true', default=False, dest='low_bandwidth')
# Note: store_false with default=True, so UPnP is attempted unless disabled.
607 parser.add_argument('--disable-upnp',
608     help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
609     action='store_false', default=True, dest='upnp')
# --- miner-facing worker (getwork RPC) interface options ---
611 worker_group = parser.add_argument_group('worker interface')
612 worker_group.add_argument('-w', '--worker-port', metavar='PORT',
613     help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
614     type=int, action='store', default=9332, dest='worker_port')
# --- upstream bitcoind RPC/p2p connection options ---
616 bitcoind_group = parser.add_argument_group('bitcoind interface')
617 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
618     help='connect to a bitcoind at this address (default: 127.0.0.1)',
619     type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
620 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
621     help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
622     type=int, action='store', default=8332, dest='bitcoind_rpc_port')
623 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
624     help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
625     type=int, action='store', default=None, dest='bitcoind_p2p_port')
# RPC username/password are positional (no option string given to
# add_argument), so they are required on the command line.
627 bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
628     help='bitcoind RPC interface username',
629     type=str, action='store', dest='bitcoind_rpc_username')
630 bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
631     help='bitcoind RPC interface password',
632     type=str, action='store', dest='bitcoind_rpc_password')
634 args = parser.parse_args()
637 p2pool_init.DEBUG = True
638 class ReopeningFile(object):
639 def __init__(self, *open_args, **open_kwargs):
640 self.open_args, self.open_kwargs = open_args, open_kwargs
641 self.inner_file = open(*self.open_args, **self.open_kwargs)
643 self.inner_file.close()
644 self.inner_file = open(*self.open_args, **self.open_kwargs)
645 def write(self, data):
646 self.inner_file.write(data)
648 self.inner_file.flush()
649 class TeePipe(object):
650 def __init__(self, outputs):
651 self.outputs = outputs
652 def write(self, data):
653 for output in self.outputs:
656 for output in self.outputs:
658 class TimestampingPipe(object):
659 def __init__(self, inner_file):
660 self.inner_file = inner_file
663 def write(self, data):
664 buf = self.buf + data
665 lines = buf.split('\n')
666 for line in lines[:-1]:
667 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
668 self.inner_file.flush()
672 logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
673 sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
674 if hasattr(signal, "SIGUSR1"):
675 def sigusr1(signum, frame):
676 print '''Caught SIGUSR1, closing 'debug.log'...'''
678 print '''...and reopened 'debug.log' after catching SIGUSR1.'''
679 signal.signal(signal.SIGUSR1, sigusr1)
681 if args.bitcoind_p2p_port is None:
682 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
684 if args.p2pool_port is None:
685 args.p2pool_port = args.net.P2P_PORT
687 if args.address is not None:
689 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
691 raise ValueError('error parsing address: ' + repr(e))
693 args.pubkey_hash = None
695 reactor.callWhenRunning(main, args)