3 from __future__ import division
18 from twisted.internet import defer, reactor, task, threads
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
# Fetch the current block template from bitcoind.
# Preferred path: the 'getmemorypool' RPC, which includes the full
# transaction list; on jsonrpc.Error we fall back to plain getwork,
# which produces a template with no transactions.  Retried up to 3 times.
# NOTE(review): listing is fragmentary -- the try:/except framing and some
# dict fields are on lines not shown here.
29 @deferral.retry('Error getting work from bitcoind:', 3)
30 @defer.inlineCallbacks
31 def getwork(bitcoind, ht, net):
33 work = yield bitcoind.rpc_getmemorypool()
# Normalize the getmemorypool reply into a plain dict of work fields.
34 defer.returnValue(dict(
35 version=work['version'],
36 previous_block_hash=int(work['previousblockhash'], 16),
37 transactions=[bitcoin.data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
38 subsidy=work['coinbasevalue'],
40 target=bitcoin.data.FloatingInteger(work['bits']),
# Fallback path: bitcoind too old for getmemorypool.  Use getwork and
# derive the subsidy locally via the network's subsidy schedule.
42 except jsonrpc.Error, e:
46 print "---> Update your bitcoind to support the 'getmemorypool' RPC call. Not including transactions in generated blocks! <---"
47 work = bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork()))
49 subsidy = net.BITCOIN_SUBSIDY_FUNC(ht.getHeight(work.previous_block))
# Height lookup failed (header not in the cache) -- assume an early height.
51 subsidy = net.BITCOIN_SUBSIDY_FUNC(1000)
53 defer.returnValue(dict(
55 previous_block_hash=work.previous_block,
59 target=work.block_target,
# Ask the connected bitcoind (over the P2P protocol) for a payout script
# via an IP-transaction checkorder.  Returns the script on 'success',
# None on 'denied' (caller falls back to an address-based script), and
# raises ValueError on any other reply.
62 @deferral.retry('Error getting payout script from bitcoind:', 1)
63 @defer.inlineCallbacks
64 def get_payout_script(factory):
65 res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
66 if res['reply'] == 'success':
67 defer.returnValue(res['script'])
68 elif res['reply'] == 'denied':
69 defer.returnValue(None)
# Any other reply value is unexpected and treated as an error.
71 raise ValueError('Unexpected reply: %r' % (res,))
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    # Fallback payout-script source: request an address in the 'p2pool'
    # account from bitcoind over JSON-RPC and convert it into a standard
    # pay-to-pubkey-hash output script.  Retried up to 10 times.
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
# Entry-point coroutine (decorates main(args); the def line is on a line
# not shown in this listing).  Startup sequence: verify the bitcoind
# JSON-RPC identity, open the bitcoin P2P connection, determine the
# payout script, and load the cached header chain.
78 @defer.inlineCallbacks
84 print 'p2pool (version %s)' % (p2pool_init.__version__,)
87 # connect to bitcoind over JSON-RPC and do initial getwork
88 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
89 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
90 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
# Network-specific sanity check that we are talking to the right daemon.
91 good = yield deferral.retry('Error while checking bitcoind identity:', 1)(args.net.BITCOIN_RPC_CHECK)(bitcoind)
93 print " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
95 temp_work = yield deferral.retry('Error while testing getwork:', 1)(defer.inlineCallbacks(lambda: defer.returnValue(bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork())))))()
97 print ' Current block hash: %x' % (temp_work.previous_block,)
100 # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
101 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
102 factory = bitcoin.p2p.ClientFactory(args.net)
103 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
# Payout script selection: explicit --address wins; otherwise try the
# IP-transaction checkorder, falling back to a bitcoind account address.
104 my_script = yield get_payout_script(factory)
105 if args.pubkey_hash is None:
106 if my_script is None:
107 print ' IP transaction denied ... falling back to sending to address.'
108 my_script = yield get_payout_script2(bitcoind, args.net)
110 my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
112 print ' Payout script:', bitcoin.data.script2_to_human(my_script, args.net)
# Load the on-disk cache of bitcoin block headers for height lookups.
115 print 'Loading cached block headers...'
116 ht = bitcoin.p2p.HeightTracker(factory, args.net.NAME + '_headers.dat')
117 print ' ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
# Load previously-seen shares from the on-disk ShareStore into the
# tracker, then re-mark the ones we had verified, and wire the tracker's
# add/remove events back to the store so it stays persistent.
120 tracker = p2pool.OkayTracker(args.net)
121 ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.NAME + '_shares.'), args.net)
122 known_verified = set()
123 print "Loading shares..."
124 for i, (mode, contents) in enumerate(ss.get_shares()):
126 if contents.hash in tracker.shares:
# Loaded shares were already persisted and shared; time_seen=0 marks
# them as historical rather than freshly received.
128 contents.shared = True
129 contents.stored = True
130 contents.time_seen = 0
131 tracker.add(contents)
# Progress indicator for large share files.
132 if len(tracker.shares) % 1000 == 0 and tracker.shares:
133 print " %i" % (len(tracker.shares),)
134 elif mode == 'verified_hash':
135 known_verified.add(contents)
# Unknown record type in the share file -- fail loudly.
137 raise AssertionError()
138 print " ...inserting %i verified shares..." % (len(known_verified),)
139 for h in known_verified:
# Verified hashes whose share is gone are stale -- drop them from disk.
140 if h not in tracker.shares:
141 ss.forget_verified_share(h)
143 tracker.verified.add(tracker.shares[h])
144 print " ...done loading %i shares!" % (len(tracker.shares),)
# Keep the on-disk store in sync with the in-memory tracker from now on.
146 tracker.added.watch(lambda share: ss.add_share(share))
147 tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
148 tracker.removed.watch(lambda share: ss.forget_share(share.hash))
149 tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
# Shared mutable state used by the work/p2p closures below.
151 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
153 # information affecting work that should trigger a long-polling update
154 current_work = variable.Variable(None)
155 # information affecting work that should not trigger a long-polling update
156 current_work2 = variable.Variable(None)
# Fired whenever new work should be fetched (e.g. a new block was heard).
158 work_updated = variable.Event()
# share_hash -> (last_request_time, request_count); entries expire after 300s.
160 requested = expiring_dict.ExpiringDict(300)
# Refresh bitcoind-derived work.  Splits the result between current_work
# (fields that should wake long-polling miners) and current_work2
# (fields that should not), preserving best_share_hash across updates.
162 @defer.inlineCallbacks
163 def set_real_work1():
164 work = yield getwork(bitcoind, ht, args.net)
# True when the bitcoin previous-block changed (or on first run).
165 changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
166 current_work.set(dict(
167 version=work['version'],
168 previous_block=work['previous_block_hash'],
169 target=work['target'],
170 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
172 current_work2.set(dict(
173 transactions=work['transactions'],
174 subsidy=work['subsidy'],
# clock_offset converts between local time and bitcoind's notion of time.
175 clock_offset=time.time() - work['time'],
176 last_update=time.time(),
# Recompute the best share chain head and request any missing parent
# shares ('desired') from peers likely to have them.
181 def set_real_work2():
182 best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
184 t = dict(current_work.value)
185 t['best_share_hash'] = best
189 for peer2, share_hash in desired:
190 if share_hash not in tracker.tails: # was received in the time tracker.think was running
# Rate-limit re-requests with exponential backoff per share hash.
192 last_request_time, count = requested.get(share_hash, (None, 0))
193 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
# Candidate peers: everyone known to have a head descending from this tail.
195 potential_peers = set()
196 for head in tracker.tails[share_hash]:
197 potential_peers.update(peer_heads.get(head, set()))
198 potential_peers = [peer for peer in potential_peers if peer.connected2]
199 if count == 0 and peer2 is not None and peer2.connected2:
# Mostly pick a random candidate, sometimes the peer that advertised it.
202 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
206 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
# Stop the download once it reaches chains we already have.
210 stops=list(set(tracker.heads) | set(
211 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
214 requested[share_hash] = t, count + 1
# Bootstrap the first work state before serving miners or peers.
216 print 'Initializing work...'
217 yield set_real_work1()
# Local start time expressed in bitcoind's clock.
222 start_time = time.time() - current_work2.value['clock_offset']
224 # setup p2p logic and join p2pool network
# Broadcast a share to every connected peer except the one it came from.
226 def share_share(share, ignore_peer=None):
227 for peer in p2p_node.peers.itervalues():
228 if peer is ignore_peer:
230 #if p2pool_init.DEBUG:
231 # print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
232 peer.send_shares([share])
# Handle a batch of shares received from the p2p network (or generated
# locally when peer is None): insert new ones into the tracker and record
# which peer knows about the batch's head.
235 def p2p_shares(shares, peer=None):
237 print 'Processing %i shares...' % (len(shares),)
# Skip shares we already have.
241 if share.hash in tracker.shares:
242 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
247 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
251 if shares and peer is not None:
252 peer_heads.setdefault(shares[0].hash, set()).add(peer)
258 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*args.net.CHAIN_LENGTH)
# When a newly-verified share also satisfies the bitcoin target, it is a
# full block -- submit it to bitcoind immediately.
260 @tracker.verified.added.watch
262 if share.bitcoin_hash <= share.header['target']:
264 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
266 if factory.conn.value is not None:
267 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
269 print 'No bitcoind connection! Erp!'
# Handle advertised share hashes from a peer: request the ones we don't
# have (with per-hash backoff) and remember the peer as a holder.
271 def p2p_share_hashes(share_hashes, peer):
274 for share_hash in share_hashes:
275 if share_hash in tracker.shares:
# Same backoff bookkeeping as in set_real_work2.
277 last_request_time, count = requested.get(share_hash, (None, 0))
278 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
280 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
281 get_hashes.append(share_hash)
282 requested[share_hash] = t, count + 1
284 if share_hashes and peer is not None:
285 peer_heads.setdefault(share_hashes[0], set()).add(peer)
287 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
# Serve a peer's getshares request: walk each requested chain up to
# 'parents' ancestors (capped so a reply stays bounded), stopping at any
# hash the peer says it already has.
289 def p2p_get_shares(share_hashes, parents, stops, peer):
290 parents = min(parents, 1000//len(share_hashes))
293 for share_hash in share_hashes:
294 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
295 if share.hash in stops:
298 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
299 peer.send_shares(shares, full=True)
# Join the p2pool network: resolve bootstrap nodes, construct the p2p
# node with a persistent address store, and install the handlers above.
301 print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
# Parse an ADDR[:PORT] node spec, defaulting to the network's p2p port.
305 ip, port = x.split(':')
308 return x, args.net.P2P_PORT
# Hard-coded bootstrap peer IPs.
311 ('72.14.191.28', args.net.P2P_PORT),
312 ('62.204.197.159', args.net.P2P_PORT),
313 ('142.58.248.28', args.net.P2P_PORT),
314 ('94.23.34.145', args.net.P2P_PORT),
318 'dabuttonfactory.com',
# DNS-resolve hostname bootstrap entries; failures are logged, not fatal.
321 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
323 log.err(None, 'Error resolving bootstrap node IP:')
326 current_work=current_work,
327 port=args.p2pool_port,
# Known-peer addresses persist across runs in a SQLite-backed dict.
329 addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.NAME),
330 mode=0 if args.low_bandwidth else 1,
331 preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
333 p2p_node.handle_shares = p2p_shares
334 p2p_node.handle_share_hashes = p2p_share_hashes
335 p2p_node.handle_get_shares = p2p_get_shares
339 # send share when the chain changes to their chain
340 def work_changed(new_work):
341 #print 'Work changed:', new_work
342 for share in tracker.get_chain_known(new_work['best_share_hash']):
345 share_share(share, share.peer)
346 current_work.changed.watch(work_changed)
# Background loop: periodically try to forward the p2pool port through
# the router via UPnP.  Failures are tolerated (logged only in DEBUG).
351 @defer.inlineCallbacks
355 is_lan, lan_ip = yield ipdiscover.get_local_ip()
357 pm = yield portmapper.get_port_mapper()
358 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
359 except defer.TimeoutError:
362 if p2pool_init.DEBUG:
363 log.err(None, "UPnP error:")
# Randomized interval (mean 120s) before the next mapping attempt.
364 yield deferral.sleep(random.expovariate(1/120))
369 # start listening for workers with a JSON-RPC server
371 print 'Listening for workers on port %i...' % (args.worker_port,)
# Maps each handed-out merkle root back to its transaction list so a
# miner's returned work can be reassembled into a full block.
375 merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
# Random per-run tag embedded in share nonces to identify our own shares.
376 run_identifier = struct.pack('<Q', random.randrange(2**64))
378 share_counter = skiplists.CountsSkipList(tracker, run_identifier)
379 removed_unstales = set()
# Count our shares in/out of the current best chain.  With doa=True also
# splits stale shares into dead-on-arrival vs other stales.
380 def get_share_counts(doa=False):
381 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
382 matching_in_chain = share_counter(current_work.value['best_share_hash'], max(0, height - 1)) | removed_unstales
383 shares_in_chain = my_shares & matching_in_chain
384 stale_shares = my_shares - matching_in_chain
386 stale_doa_shares = stale_shares & doa_shares
387 stale_not_doa_shares = stale_shares - stale_doa_shares
388 return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
389 return len(shares_in_chain) + len(stale_shares), len(stale_shares)
# When one of our in-chain shares is pruned, remember it so the counts
# above don't misclassify it as stale.
390 @tracker.verified.removed.watch
392 if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
393 removed_unstales.add(share.hash)
# Build a getwork BlockAttempt for a miner: create the generate
# transaction (which encodes the share chain data in its coinbase),
# assemble the transaction list, and remember it by merkle root so
# got_response can reconstruct the block later.
395 def compute(state, payout_script):
396 if payout_script is None:
397 payout_script = my_script
# Refuse to hand out work while not usefully connected (PERSIST nets).
398 if state['best_share_hash'] is None and args.net.PERSIST:
399 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
400 if len(p2p_node.peers) == 0 and args.net.PERSIST:
401 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
402 if time.time() > current_work2.value['last_update'] + 60:
403 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
405 # XXX assuming generate_tx is smallish here..
# Encode our recent stale fraction into 4 nonce bytes (value duplicated
# for redundancy) so other nodes can estimate pool-wide stale rates.
406 def get_stale_frac():
407 shares, stale_shares = get_share_counts()
410 frac = stale_shares/shares
411 return 2*struct.pack('<H', int(65535*frac + .5))
412 subsidy = current_work2.value['subsidy']
413 generate_tx = p2pool.generate_transaction(
415 previous_share_hash=state['best_share_hash'],
416 new_script=payout_script,
# Nonce layout: run_identifier + 8 random bytes + stale-frac encoding.
418 nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)) + get_stale_frac(),
419 block_target=state['target'],
422 print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], (generate_tx['tx_outs'][-1]['value']-subsidy//200)*1e-8, args.net.BITCOIN_SYMBOL, subsidy*1e-8, args.net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
423 #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
424 #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
425 transactions = [generate_tx] + list(current_work2.value['transactions'])
426 merkle_root = bitcoin.data.merkle_hash(transactions)
427 merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
# Use bitcoind-time; never let the timestamp fall at/below the median of
# the previous 11 shares (same rule bitcoin applies to block timestamps).
429 timestamp = int(time.time() - current_work2.value['clock_offset'])
430 if state['best_share_hash'] is not None:
431 timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
432 if timestamp2 > timestamp:
433 print 'Toff', timestamp2 - timestamp
434 timestamp = timestamp2
# Share target (easier than the block target) taken from the coinbase.
435 target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
# Record issue time per nonce so share latency can be reported later.
436 times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
437 #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
438 return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
# Handle solved work returned by a miner: rebuild the block from the
# cached transactions, submit to bitcoind if it meets the bitcoin
# target, and record the share locally (flagging dead-on-arrival shares
# whose parent is no longer the chain head).
444 def got_response(data, user):
446 # match up with transactions
447 header = bitcoin.getwork.decode_data(data)
448 transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
449 if transactions is None:
450 print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
452 block = dict(header=header, txs=transactions)
453 hash_ = bitcoin.data.block_header_type.hash256(block['header'])
# In DEBUG mode blocks are submitted unconditionally for testing.
454 if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
455 if factory.conn.value is not None:
456 factory.conn.value.send_block(block=block)
458 print 'No bitcoind connection! Erp!'
459 if hash_ <= block['header']['target']:
461 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
# Reject submissions that don't even meet the share target.
463 target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
465 print 'Worker submitted share with hash > target:\nhash : %x\ntarget: %x' % (hash_, target)
467 share = p2pool.Share.from_block(block)
468 my_shares.add(share.hash)
# Parent mismatch means the chain moved while the miner worked: DOA.
469 if share.previous_hash != current_work.value['best_share_hash']:
470 doa_shares.add(share.hash)
471 print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
472 good = share.previous_hash == current_work.value['best_share_hash']
473 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
475 # eg. good = share.hash == current_work.value['best_share_hash'] here
# Any failure while processing miner data is logged, not propagated.
478 log.err(None, 'Error processing data received from worker:')
# HTTP side: the worker RPC endpoint plus small JSON status resources.
481 web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
# Estimated pool hash rate, corrected by the median encoded stale
# fraction of the last 120 shares.
484 if current_work.value['best_share_hash'] is not None:
485 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
486 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
487 fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
488 return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
489 return json.dumps(None)
# Current payout weights per payout script, as fractions of the total.
492 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
493 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
495 for script in sorted(weights, key=lambda s: weights[s]):
496 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
497 return json.dumps(res)
# Minimal twisted resource that serves func() with a fixed MIME type.
499 class WebInterface(resource.Resource):
500 def __init__(self, func, mime_type):
501 self.func, self.mime_type = func, mime_type
503 def render_GET(self, request):
504 request.setHeader('Content-Type', self.mime_type)
507 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
508 web_root.putChild('users', WebInterface(get_users, 'application/json'))
510 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
512 reactor.listenTCP(args.worker_port, server.Site(web_root))
519 # do new getwork when a block is heard on the p2p interface
# A block announced on the bitcoin p2p connection triggers a work refresh.
521 def new_block(block_hash):
522 work_updated.happened()
523 factory.new_block.watch(new_block)
525 print 'Started successfully!'
# Re-evaluate the best chain whenever the header cache learns new heights.
528 ht.updated.watch(set_real_work2)
# Polling loop: refresh bitcoind work when triggered, or after a random
# 1-10s delay as a fallback.
530 @defer.inlineCallbacks
533 flag = work_updated.get_deferred()
535 yield set_real_work1()
538 yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
# Second background loop with a randomized ~20s mean period.
540 @defer.inlineCallbacks
547 yield deferral.sleep(random.expovariate(1/20))
# Watchdog: arm a 30s SIGALRM re-set every second; if the reactor stalls
# for 30s the handler dumps a stack trace.
553 if hasattr(signal, 'SIGALRM'):
554 def watchdog_handler(signum, frame):
555 print 'Watchdog timer went off at:'
556 traceback.print_stack()
558 signal.signal(signal.SIGALRM, watchdog_handler)
559 task.LoopingCall(signal.alarm, 30).start(1)
# Decode the stale fraction a share's creator embedded in its 20-byte
# nonce (see get_stale_frac in compute).  Non-conforming nonces yield
# None.  NOTE(review): the validation/return lines are not all visible
# in this listing.
562 def read_stale_frac(share):
563 if len(share.nonce) != 20:
565 a, b = struct.unpack("<HH", share.nonce[-4:])
# Periodic status loop: every ~3s print pool/local statistics and warn
# if bitcoind has been silent for over a minute.
571 yield deferral.sleep(3)
573 if time.time() > current_work2.value['last_update'] + 60:
574 print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
575 if current_work.value['best_share_hash'] is not None:
576 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
578 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
579 weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**100)
580 shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
581 stale_shares = stale_doa_shares + stale_not_doa_shares
582 fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
583 print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
# Pool rate is corrected upward by the median advertised stale fraction.
584 math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
586 len(tracker.verified.shares),
588 weights.get(my_script, 0)/total_weight*100,
589 math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
591 stale_not_doa_shares,
594 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
# Compare our own stale rate against the pool's median.
596 med = math.median(fracs)
597 print 'Median stale proportion:', med
599 print ' Own:', stale_shares/shares
601 print ' Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
# Top-level failure of main is fatal and logged.
607 log.err(None, 'Fatal error:')
# ArgumentParser subclass that fixes @file argument expansion: file
# contents are themselves expanded recursively, and each line may hold
# multiple whitespace-separated arguments (see convert_arg_line_to_args
# override below).
612 class FixedArgumentParser(argparse.ArgumentParser):
613 def _read_args_from_files(self, arg_strings):
614 # expand arguments referencing files
616 for arg_string in arg_strings:
618 # for regular arguments, just add them back into the list
619 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
620 new_arg_strings.append(arg_string)
622 # replace arguments referencing files with the file content
625 args_file = open(arg_string[1:])
628 for arg_line in args_file.read().splitlines():
629 for arg in self.convert_arg_line_to_args(arg_line):
630 arg_strings.append(arg)
# Recurse so an @file may itself reference further @files.
631 arg_strings = self._read_args_from_files(arg_strings)
632 new_arg_strings.extend(arg_strings)
# File errors are reported through the parser's own error mechanism.
636 err = sys.exc_info()[1]
639 # return the modified argument list
640 return new_arg_strings
# Command-line interface definition and parsing.
642 parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
# Each line of an @file may contain several whitespace-separated args.
643 parser.convert_arg_line_to_args = lambda arg_line: (arg for arg in arg_line.split() if arg.strip())
644 parser.add_argument('--version', action='version', version=p2pool_init.__version__)
645 parser.add_argument('--net',
646 help='use specified network (default: bitcoin)',
647 action='store', choices=sorted(x for x in p2pool.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
648 parser.add_argument('--testnet',
649 help='''use the network's testnet''',
650 action='store_const', const=True, default=False, dest='testnet')
651 parser.add_argument('--debug',
652 help='debugging mode',
653 action='store_const', const=True, default=False, dest='debug')
654 parser.add_argument('-a', '--address',
655 help='generate to this address (defaults to requesting one from bitcoind)',
656 type=str, action='store', default=None, dest='address')
657 parser.add_argument('--charts',
658 help='generate charts on the web interface (requires PIL and pygame)',
659 action='store_const', const=True, default=False, dest='charts')
660 parser.add_argument('--logfile',
661 help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
662 type=str, action='store', default=None, dest='logfile')
# p2pool-network options.
664 p2pool_group = parser.add_argument_group('p2pool interface')
665 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
666 help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
667 type=int, action='store', default=None, dest='p2pool_port')
668 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
669 help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
670 type=str, action='append', default=[], dest='p2pool_nodes')
671 parser.add_argument('-l', '--low-bandwidth',
672 help='trade lower bandwidth usage for higher latency (reduced efficiency)',
673 action='store_true', default=False, dest='low_bandwidth')
674 parser.add_argument('--disable-upnp',
675 help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
676 action='store_false', default=True, dest='upnp')
# Miner-facing options.
678 worker_group = parser.add_argument_group('worker interface')
679 worker_group.add_argument('-w', '--worker-port', metavar='PORT',
680 help='listen on PORT for RPC connections from miners asking for work and providing responses (default: bitcoin: 9332 namecoin: 9331 ixcoin: 9330 i0coin: 9329, +10000 for testnets)',
681 type=int, action='store', default=None, dest='worker_port')
# bitcoind connection options.
683 bitcoind_group = parser.add_argument_group('bitcoind interface')
684 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
685 help='connect to a bitcoind at this address (default: 127.0.0.1)',
686 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
687 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
688 help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332, 8338 for ixcoin)',
689 type=int, action='store', default=None, dest='bitcoind_rpc_port')
690 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
691 help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
692 type=int, action='store', default=None, dest='bitcoind_p2p_port')
# Positional RPC credentials (username optional, password required).
694 bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
695 help='bitcoind RPC interface username (default: empty)',
696 type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
697 bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
698 help='bitcoind RPC interface password',
699 type=str, action='store', dest='bitcoind_rpc_password')
701 args = parser.parse_args()
704 p2pool_init.DEBUG = True
706 if args.logfile is None:
707 args.logfile = os.path.join(os.path.dirname(sys.argv[0]), args.net_name + ('_testnet' if args.testnet else '') + '.log')
# Append-mode log file that can be reopened (for rotation via SIGUSR1 /
# a periodic LoopingCall).  On reopen it truncates oversized logs,
# keeping roughly the last megabyte aligned to a line boundary.
709 class LogFile(object):
710 def __init__(self, filename):
711 self.filename = filename
# Opened lazily by reopen(); None until then.
712 self.inner_file = None
715 if self.inner_file is not None:
716 self.inner_file.close()
# Ensure the file exists before reading it.
717 open(self.filename, 'a').close()
718 f = open(self.filename, 'rb')
719 f.seek(0, os.SEEK_END)
# Over ~100MB: keep only the final ~1MB, starting at a newline.
721 if length > 100*1000*1000:
722 f.seek(-1000*1000, os.SEEK_END)
724 if f.read(1) in ('', '\n'):
728 f = open(self.filename, 'wb')
731 self.inner_file = open(self.filename, 'a')
# Writes go straight through and are flushed immediately.
732 def write(self, data):
733 self.inner_file.write(data)
735 self.inner_file.flush()
# File-like object that duplicates every write (and flush) to each of
# several underlying outputs, e.g. the real stderr plus the log file.
736 class TeePipe(object):
737 def __init__(self, outputs):
738 self.outputs = outputs
739 def write(self, data):
740 for output in self.outputs:
743 for output in self.outputs:
# File-like wrapper that prefixes each complete line with a HH:MM:SS.us
# timestamp, buffering partial lines until their newline arrives.
745 class TimestampingPipe(object):
746 def __init__(self, inner_file):
747 self.inner_file = inner_file
750 def write(self, data):
# Accumulate with any previously-buffered partial line, then emit every
# complete line; the trailing fragment stays in the buffer.
751 buf = self.buf + data
752 lines = buf.split('\n')
753 for line in lines[:-1]:
754 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
755 self.inner_file.flush()
# Redirect stdout/stderr (and twisted's log observer) through the
# timestamping tee, wire up log rotation, resolve network-dependent
# defaults, then hand control to the reactor.
759 logfile = LogFile(args.logfile)
760 sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
# SIGUSR1 forces an immediate log reopen (classic logrotate hook).
761 if hasattr(signal, "SIGUSR1"):
762 def sigusr1(signum, frame):
763 print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
765 print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
766 signal.signal(signal.SIGUSR1, sigusr1)
# Also reopen periodically so the size cap in LogFile.reopen applies.
767 task.LoopingCall(logfile.reopen).start(5)
# Select the network object, honoring --testnet.
769 args.net = p2pool.nets[args.net_name + ('_testnet' if args.testnet else '')]
# Fill in any ports the user did not specify from network defaults.
771 if args.bitcoind_rpc_port is None:
772 args.bitcoind_rpc_port = args.net.BITCOIN_RPC_PORT
774 if args.bitcoind_p2p_port is None:
775 args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
777 if args.p2pool_port is None:
778 args.p2pool_port = args.net.P2P_PORT
780 if args.worker_port is None:
781 args.worker_port = args.net.WORKER_PORT
# Validate --address up front so a typo fails fast.
783 if args.address is not None:
785 args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
787 raise ValueError('error parsing address: ' + repr(e))
789 args.pubkey_hash = None
# Start the service once the reactor is running.
791 reactor.callWhenRunning(main, args)