1 from __future__ import division
16 from twisted.internet import defer, error, reactor, protocol, task
17 from twisted.web import server, resource
18 from twisted.python import log
19 from nattraverso import portmapper, ipdiscover
21 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
22 from bitcoin import worker_interface
23 from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
24 from . import p2p, networks, graphs
25 import p2pool, p2pool.data as p2pool_data
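# getwork(): poll bitcoind's getmemorypool RPC and repackage the template for p2pool's use:
# hex transactions are decoded and parsed, the merkle branch for the coinbase slot is
# precomputed, and 'bits'/'coinbaseflags' are normalized. Wrapped in deferral.retry so
# transient RPC failures are retried.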
27 @deferral.retry('Error getting work from bitcoind:', 3)
28 @defer.inlineCallbacks
29 def getwork(bitcoind):
31 work = yield bitcoind.rpc_getmemorypool()
32 except jsonrpc.Error, e:
33 if e.code == -32601: # Method not found
34 print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
35 raise deferral.RetrySilentlyException()
37 packed_transactions = [x.decode('hex') for x in work['transactions']]
38 defer.returnValue(dict(
39 version=work['version'],
40 previous_block_hash=int(work['previousblockhash'], 16),
41 transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
42 merkle_branch=bitcoin_data.calculate_merkle_branch([0] + map(bitcoin_data.hash256, packed_transactions), 0),
43 subsidy=work['coinbasevalue'],
45 bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
46 coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
49 @defer.inlineCallbacks
50 def main(args, net, datadir_path, merged_urls, worker_endpoint):
52 print 'p2pool (version %s)' % (p2pool.__version__,)
58 print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
61 # connect to bitcoind over JSON-RPC and do initial getmemorypool
62 url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
63 print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
64 bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
65 good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
67 print >>sys.stderr, " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
69 temp_work = yield getwork(bitcoind)
71 print ' Current block hash: %x' % (temp_work['previous_block_hash'],)
74 # connect to bitcoind over bitcoin-p2p
75 print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
76 factory = bitcoin_p2p.ClientFactory(net.PARENT)
77 reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
78 yield factory.getProtocol() # waits until handshake is successful
82 print 'Determining payout address...'
83 if args.pubkey_hash is None:
84 address_path = os.path.join(datadir_path, 'cached_payout_address')
86 if os.path.exists(address_path):
87 with open(address_path, 'rb') as f:
88 address = f.read().strip('\r\n')
89 print ' Loaded cached address: %s...' % (address,)
93 if address is not None:
94 res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
95 if not res['isvalid'] or not res['ismine']:
96 print ' Cached address is either invalid or not controlled by local bitcoind!'
100 print ' Getting payout address from bitcoind...'
101 address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
103 with open(address_path, 'wb') as f:
106 my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
108 my_pubkey_hash = args.pubkey_hash
109 print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
112 my_share_hashes = set()
113 my_doa_share_hashes = set()
115 tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
116 shared_share_hashes = set()
117 ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
118 known_verified = set()
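# Replay the on-disk share store: re-add every saved share to the tracker, collect the
# hashes recorded as verified, then mark those still present as verified (forgetting any
# verified hash whose share is gone).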
120 print "Loading shares..."
121 for i, (mode, contents) in enumerate(ss.get_shares()):
123 if contents.hash in tracker.shares:
125 shared_share_hashes.add(contents.hash)
126 contents.time_seen = 0
127 tracker.add(contents)
128 if len(tracker.shares) % 1000 == 0 and tracker.shares:
129 print " %i" % (len(tracker.shares),)
130 elif mode == 'verified_hash':
131 known_verified.add(contents)
133 raise AssertionError()
134 print " ...inserting %i verified shares..." % (len(known_verified),)
135 for h in known_verified:
136 if h not in tracker.shares:
137 ss.forget_verified_share(h)
139 tracker.verified.add(tracker.shares[h])
140 print " ...done loading %i shares!" % (len(tracker.shares),)
142 tracker.removed.watch(lambda share: ss.forget_share(share.hash))
143 tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
144 tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
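# Work state is staged: pre_current_work/pre_merged_work hold the raw bitcoind and
# merged-mining results, and set_real_work2() below combines them into current_work.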
146 peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
148 pre_current_work = variable.Variable(None)
149 pre_merged_work = variable.Variable({})
150 # information affecting work that should trigger a long-polling update
151 current_work = variable.Variable(None)
152 # information affecting work that should not trigger a long-polling update
153 current_work2 = variable.Variable(None)
155 requested = expiring_dict.ExpiringDict(300)
157 print 'Initializing work...'
158 @defer.inlineCallbacks
159 def set_real_work1():
160 work = yield getwork(bitcoind)
161 current_work2.set(dict(
163 transactions=work['transactions'],
164 merkle_branch=work['merkle_branch'],
165 subsidy=work['subsidy'],
166 clock_offset=time.time() - work['time'],
167 last_update=time.time(),
168 )) # second set first because everything hooks on the first
169 pre_current_work.set(dict(
170 version=work['version'],
171 previous_block=work['previous_block_hash'],
173 coinbaseflags=work['coinbaseflags'],
175 yield set_real_work1()
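# Pick a block-height source: if bitcoind's RPC help lists getblock, cache heights via
# RPC lookups; otherwise fall back to tracking headers over the bitcoin P2P connection.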
177 if '\ngetblock ' in (yield deferral.retry()(bitcoind.rpc_help)()):
178 height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
179 best_height_cached = variable.Variable((yield deferral.retry()(height_cacher)(pre_current_work.value['previous_block'])))
180 def get_height_rel_highest(block_hash):
181 this_height = height_cacher.call_now(block_hash, 0)
182 best_height = height_cacher.call_now(pre_current_work.value['previous_block'], 0)
183 best_height_cached.set(max(best_height_cached.value, this_height, best_height))
184 return this_height - best_height_cached.value
186 get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory, 5*net.SHARE_PERIOD*net.CHAIN_LENGTH/net.PARENT.BLOCK_PERIOD).get_height_rel_highest
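# set_real_work2(): ask the tracker for the current best share, publish it (together with
# any merged-mining work) as current_work, and request missing parent shares from peers,
# backing off per-hash via the 'requested' expiring dict.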
188 def set_real_work2():
189 best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
191 t = dict(pre_current_work.value)
192 t['best_share_hash'] = best
193 t['mm_chains'] = pre_merged_work.value
197 for peer2, share_hash in desired:
198 if share_hash not in tracker.tails: # was received in the time tracker.think was running
200 last_request_time, count = requested.get(share_hash, (None, 0))
201 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
203 potential_peers = set()
204 for head in tracker.tails[share_hash]:
205 potential_peers.update(peer_heads.get(head, set()))
206 potential_peers = [peer for peer in potential_peers if peer.connected2]
207 if count == 0 and peer2 is not None and peer2.connected2:
210 peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
214 print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
218 stops=list(set(tracker.heads) | set(
219 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
222 requested[share_hash] = t, count + 1
223 pre_current_work.changed.watch(lambda _: set_real_work2())
224 pre_merged_work.changed.watch(lambda _: set_real_work2())
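# Merged mining: poll each merged daemon's getauxblock about once per second and store
# the latest aux work in pre_merged_work keyed by chain id.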
230 @defer.inlineCallbacks
231 def set_merged_work(merged_url, merged_userpass):
232 merged_proxy = jsonrpc.Proxy(merged_url, (merged_userpass,))
234 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
235 pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
236 hash=int(auxblock['hash'], 16),
237 target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
238 merged_proxy=merged_proxy,
240 yield deferral.sleep(1)
241 for merged_url, merged_userpass in merged_urls:
242 set_merged_work(merged_url, merged_userpass)
244 @pre_merged_work.changed.watch
245 def _(new_merged_work):
246 print 'Got new merged mining work!'
248 # setup p2p logic and join p2pool network
250 class Node(p2p.Node):
251 def handle_shares(self, shares, peer):
253 print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
257 if share.hash in tracker.shares:
258 #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
263 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
267 if shares and peer is not None:
268 peer_heads.setdefault(shares[0].hash, set()).add(peer)
274 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
276 def handle_share_hashes(self, hashes, peer):
279 for share_hash in hashes:
280 if share_hash in tracker.shares:
282 last_request_time, count = requested.get(share_hash, (None, 0))
283 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
285 print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
286 get_hashes.append(share_hash)
287 requested[share_hash] = t, count + 1
289 if hashes and peer is not None:
290 peer_heads.setdefault(hashes[0], set()).add(peer)
292 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
294 def handle_get_shares(self, hashes, parents, stops, peer):
295 parents = min(parents, 1000//len(hashes))
298 for share_hash in hashes:
299 for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
300 if share.hash in stops:
303 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
304 peer.sendShares(shares)
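# When a newly verified share also meets the bitcoin block target, hand it to bitcoind
# as a complete block.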
306 @tracker.verified.added.watch
308 if share.pow_hash <= share.header['bits'].target:
309 if factory.conn.value is not None:
310 factory.conn.value.send_block(block=share.as_block(tracker))
312 print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
314 print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
316 recent_blocks.append(dict(ts=share.timestamp, hash='%064x' % (share.header_hash,)))
318 print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
320 @defer.inlineCallbacks
323 ip, port = x.split(':')
324 defer.returnValue(((yield reactor.resolve(ip)), int(port)))
326 defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
329 if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
331 addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
333 print >>sys.stderr, "error reading addrs"
334 for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
337 if addr not in addrs:
338 addrs[addr] = (0, time.time(), time.time())
342 connect_addrs = set()
343 for addr_df in map(parse, args.p2pool_nodes):
345 connect_addrs.add((yield addr_df))
350 best_share_hash_func=lambda: current_work.value['best_share_hash'],
351 port=args.p2pool_port,
354 connect_addrs=connect_addrs,
359 open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
360 task.LoopingCall(save_addrs).start(60)
362 # send share when the chain changes to their chain
363 def work_changed(new_work):
364 #print 'Work changed:', new_work
366 for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
367 if share.hash in shared_share_hashes:
369 shared_share_hashes.add(share.hash)
372 for peer in p2p_node.peers.itervalues():
373 peer.sendShares([share for share in shares if share.peer is not peer])
375 current_work.changed.watch(work_changed)
378 for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
380 if share.hash in tracker.verified.shares:
381 ss.add_verified_hash(share.hash)
382 task.LoopingCall(save_shares).start(60)
387 start_time = time.time()
389 @defer.inlineCallbacks
393 is_lan, lan_ip = yield ipdiscover.get_local_ip()
395 pm = yield portmapper.get_port_mapper()
396 yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
397 except defer.TimeoutError:
401 log.err(None, "UPnP error:")
402 yield deferral.sleep(random.expovariate(1/120))
407 # start listening for workers with a JSON-RPC server
409 print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
411 if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
412 with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
413 vip_pass = f.read().strip('\r\n')
415 vip_pass = '%016x' % (random.randrange(2**64),)
416 with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
418 print ' Worker password:', vip_pass, '(only required for generating graphs)'
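# Stale-share bookkeeping: stale_info is 0 for on-time shares, 253 for orphans and 254
# for dead-on-arrival shares. When one of our shares is pruned from the tracker, bump
# these counters so get_stale_counts() keeps reporting correct totals.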
422 removed_unstales_var = variable.Variable((0, 0, 0))
423 @tracker.verified.removed.watch
425 if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
426 assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
427 removed_unstales_var.set((
428 removed_unstales_var.value[0] + 1,
429 removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
430 removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
433 removed_doa_unstales_var = variable.Variable(0)
434 @tracker.verified.removed.watch
436 if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
437 removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
439 def get_stale_counts():
440 '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
441 my_shares = len(my_share_hashes)
442 my_doa_shares = len(my_doa_share_hashes)
443 delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
444 my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
445 my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
446 orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
447 doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
449 my_shares_not_in_chain = my_shares - my_shares_in_chain
450 my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
452 return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
455 local_rate_monitor = math.RateMonitor(10*60)
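# WorkerBridge turns miner getwork requests into p2pool share work: it chooses a payout
# pubkey hash from the miner's username (falling back to this node's address according
# to --fee), builds the share's generate transaction, and returns a BlockAttempt plus a
# callback for submitted headers.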
457 class WorkerBridge(worker_interface.WorkerBridge):
459 worker_interface.WorkerBridge.__init__(self)
460 self.new_work_event = current_work.changed
461 self.recent_shares_ts_work = []
463 def _get_payout_pubkey_hash_from_username(self, user):
467 return bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
471 def preprocess_request(self, request):
472 payout_pubkey_hash = self._get_payout_pubkey_hash_from_username(request.getUser())
473 if payout_pubkey_hash is None or random.uniform(0, 100) < args.worker_fee:
474 payout_pubkey_hash = my_pubkey_hash
475 return payout_pubkey_hash,
477 def get_work(self, pubkey_hash):
478 if len(p2p_node.peers) == 0 and net.PERSIST:
479 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
480 if current_work.value['best_share_hash'] is None and net.PERSIST:
481 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
482 if time.time() > current_work2.value['last_update'] + 60:
483 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
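# If merged-mining work is present, commit to it in the coinbase: build an auxpow merkle
# tree over the aux chain hashes and embed its root after the '\xfa\xbemm' magic.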
485 if current_work.value['mm_chains']:
486 tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
487 mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
488 mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
489 merkle_root=bitcoin_data.merkle_hash(mm_hashes),
493 mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
498 share_info, generate_tx = p2pool_data.generate_transaction(
501 previous_share_hash=current_work.value['best_share_hash'],
502 coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
503 nonce=struct.pack('<Q', random.randrange(2**64)),
504 new_script=bitcoin_data.pubkey_hash_to_script2(pubkey_hash),
505 subsidy=current_work2.value['subsidy'],
506 donation=math.perfect_round(65535*args.donation_percentage/100),
507 stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
508 253 if orphans > orphans_recorded_in_chain else
509 254 if doas > doas_recorded_in_chain else
511 )(*get_stale_counts()),
513 block_target=current_work.value['bits'].target,
514 desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
518 target = net.PARENT.SANE_MAX_TARGET
519 if len(self.recent_shares_ts_work) == 50:
520 hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
521 target = min(target, 2**256//(hash_rate * 5))
522 target = max(target, share_info['bits'].target)
523 for aux_work in current_work.value['mm_chains'].itervalues():
524 target = max(target, aux_work['target'])
526 transactions = [generate_tx] + list(current_work2.value['transactions'])
527 merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(generate_tx)), 0, current_work2.value['merkle_branch'])
529 getwork_time = time.time()
530 merkle_branch = current_work2.value['merkle_branch']
532 print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
533 bitcoin_data.target_to_difficulty(target),
534 bitcoin_data.target_to_difficulty(share_info['bits'].target),
535 current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
536 len(current_work2.value['transactions']),
539 ba = bitcoin_getwork.BlockAttempt(
540 version=current_work.value['version'],
541 previous_block=current_work.value['previous_block'],
542 merkle_root=merkle_root,
543 timestamp=current_work2.value['time'],
544 bits=current_work.value['bits'],
548 received_header_hashes = set()
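# got_response(): handle a header submitted by a miner. Depending on how much work the
# header proves, submit a bitcoin block, answer any merged-mining chains, and/or add a
# new p2pool share, then record the attempt for local rate statistics.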
550 def got_response(header, request):
551 assert header['merkle_root'] == merkle_root
553 header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
554 pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
555 on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
558 if pow_hash <= header['bits'].target or p2pool.DEBUG:
559 @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
561 if factory.conn.value is None:
562 print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
563 raise deferral.RetrySilentlyException()
564 factory.conn.value.send_block(block=dict(header=header, txs=transactions))
566 if pow_hash <= header['bits'].target:
568 print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
570 recent_blocks.append(dict(ts=time.time(), hash='%064x' % (header_hash,)))
572 log.err(None, 'Error while processing potential block:')
574 for aux_work, index, hashes in mm_later:
576 if pow_hash <= aux_work['target'] or p2pool.DEBUG:
577 df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
578 pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
579 bitcoin_data.aux_pow_type.pack(dict(
582 block_hash=header_hash,
583 merkle_branch=merkle_branch,
586 merkle_branch=bitcoin_data.calculate_merkle_branch(hashes, index),
588 parent_block_header=header,
593 if result != (pow_hash <= aux_work['target']):
594 print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
596 print 'Merged block submittal result: %s' % (result,)
599 log.err(err, 'Error submitting merged block:')
601 log.err(None, 'Error while processing merged mining POW:')
603 if pow_hash <= share_info['bits'].target:
604 share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
605 print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
607 p2pool_data.format_hash(share.hash),
608 p2pool_data.format_hash(share.previous_hash),
609 time.time() - getwork_time,
610 ' DEAD ON ARRIVAL' if not on_time else '',
612 my_share_hashes.add(share.hash)
614 my_doa_share_hashes.add(share.hash)
618 tracker.verified.add(share)
622 if pow_hash <= header['bits'].target or p2pool.DEBUG:
623 for peer in p2p_node.peers.itervalues():
624 peer.sendShares([share])
625 shared_share_hashes.add(share.hash)
627 log.err(None, 'Error forwarding block solution:')
629 if pow_hash <= target and header_hash not in received_header_hashes:
630 reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
631 if request.getPassword() == vip_pass:
632 reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
633 self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
634 while len(self.recent_shares_ts_work) > 50:
635 self.recent_shares_ts_work.pop(0)
636 local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
638 if header_hash in received_header_hashes:
639 print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
640 received_header_hashes.add(header_hash)
642 if pow_hash > target:
643 print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
644 print ' Hash:   %064x' % (pow_hash,)
645 print ' Target: %064x' % (target,)
649 return ba, got_response
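# Status endpoints served over HTTP on the worker port, next to the mining RPC.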
651 web_root = resource.Resource()
652 worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
655 if tracker.get_height(current_work.value['best_share_hash']) < 720:
656 return json.dumps(None)
657 return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
658 / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
661 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
662 weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
664 for script in sorted(weights, key=lambda s: weights[s]):
665 res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
666 return json.dumps(res)
668 def get_current_txouts():
669 share = tracker.shares[current_work.value['best_share_hash']]
670 share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
671 return dict((out['script'], out['value']) for out in gentx['tx_outs'])
673 def get_current_scaled_txouts(scale, trunc=0):
674 txouts = get_current_txouts()
675 total = sum(txouts.itervalues())
676 results = dict((script, value*scale//total) for script, value in txouts.iteritems())
680 for s in sorted(results, key=results.__getitem__):
681 if results[s] >= trunc:
683 total_random += results[s]
686 winner = math.weighted_choice((script, results[script]) for script in random_set)
687 for script in random_set:
689 results[winner] = total_random
690 if sum(results.itervalues()) < int(scale):
691 results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
694 def get_current_payouts():
695 return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
697 def get_patron_sendmany(this):
700 this, trunc = this.split('/', 1)
703 return json.dumps(dict(
704 (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
705 for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
706 if bitcoin_data.script2_to_address(script, net.PARENT) is not None
709 return json.dumps(None)
711 def get_global_stats():
712 # averaged over last hour
713 lookbehind = 3600//net.SHARE_PERIOD
714 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
717 nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
718 stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
719 return json.dumps(dict(
720 pool_nonstale_hash_rate=nonstale_hash_rate,
721 pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
722 pool_stale_prop=stale_prop,
725 def get_local_stats():
726 lookbehind = 3600//net.SHARE_PERIOD
727 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
730 global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
732 my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
733 my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
734 my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
735 my_share_count = my_unstale_count + my_orphan_count + my_doa_count
736 my_stale_count = my_orphan_count + my_doa_count
738 my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
740 my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
741 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
742 if share.hash in my_share_hashes)
743 actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
744 tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
745 share_att_s = my_work / actual_time
747 miner_hash_rates = {}
748 miner_dead_hash_rates = {}
749 datums, dt = local_rate_monitor.get_datums_in_last()
751 miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
753 miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
755 return json.dumps(dict(
756 my_hash_rates_in_last_hour=dict(
758 nonstale=share_att_s,
759 rewarded=share_att_s/(1 - global_stale_prop),
760 actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
762 my_share_counts_in_last_hour=dict(
763 shares=my_share_count,
764 unstale_shares=my_unstale_count,
765 stale_shares=my_stale_count,
766 orphan_stale_shares=my_orphan_count,
767 doa_stale_shares=my_doa_count,
769 my_stale_proportions_in_last_hour=dict(
771 orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
772 dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
774 miner_hash_rates=miner_hash_rates,
775 miner_dead_hash_rates=miner_dead_hash_rates,
778 def get_peer_addresses():
779 return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
782 return json.dumps(time.time() - start_time)
784 class WebInterface(resource.Resource):
785 def __init__(self, func, mime_type, *fields):
786 self.func, self.mime_type, self.fields = func, mime_type, fields
788 def render_GET(self, request):
789 request.setHeader('Content-Type', self.mime_type)
790 request.setHeader('Access-Control-Allow-Origin', '*')
791 return self.func(*(request.args[field][0] for field in self.fields))
793 web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
794 web_root.putChild('users', WebInterface(get_users, 'application/json'))
795 web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
796 web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
797 web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
798 web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
799 web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
800 web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
801 web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)), 'application/json'))
802 web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
803 web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
805 web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
807 new_root = resource.Resource()
808 web_root.putChild('web', new_root)
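# Keep a rolling stats log (pool/local hash rates, stale counts, peers, payout), trimmed
# to roughly the last 24 hours, persisted in the datadir and served at web/log.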
811 if os.path.exists(os.path.join(datadir_path, 'stats')):
813 with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
814 stat_log = json.loads(f.read())
816 log.err(None, 'Error loading stats:')
817 def update_stat_log():
818 while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
821 lookbehind = 3600//net.SHARE_PERIOD
822 if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
825 global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
826 (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
828 miner_hash_rates = {}
829 miner_dead_hash_rates = {}
830 datums, dt = local_rate_monitor.get_datums_in_last()
832 miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
834 miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
836 stat_log.append(dict(
838 pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
839 pool_stale_prop=global_stale_prop,
840 local_hash_rates=miner_hash_rates,
841 local_dead_hash_rates=miner_dead_hash_rates,
843 stale_shares=stale_orphan_shares + stale_doa_shares,
844 stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
845 current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
847 incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
848 outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
850 attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].target),
851 attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target),
852 block_value=current_work2.value['subsidy']*1e-8,
855 with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
856 f.write(json.dumps(stat_log))
857 task.LoopingCall(update_stat_log).start(5*60)
858 new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
860 grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
861 web_root.putChild('graphs', grapher.get_resource())
863 if tracker.get_height(current_work.value['best_share_hash']) < 720:
865 nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
866 poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
867 grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
868 task.LoopingCall(add_point).start(100)
870 def attempt_listen():
872 reactor.listenTCP(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
873 except error.CannotListenError, e:
874 print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
875 reactor.callLater(1, attempt_listen)
877 with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
885 @defer.inlineCallbacks
888 flag = factory.new_block.get_deferred()
890 yield set_real_work1()
893 yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
898 print 'Started successfully!'
902 if hasattr(signal, 'SIGALRM'):
903 signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
904 sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
906 signal.siginterrupt(signal.SIGALRM, False)
907 task.LoopingCall(signal.alarm, 30).start(1)
909 if args.irc_announce:
910 from twisted.words.protocols import irc
911 class IRCClient(irc.IRCClient):
912 nickname = 'p2pool%02i' % (random.randrange(100),)
913 channel = '#p2pool' if net.NAME == 'bitcoin' else '#p2pool-alt'
914 def lineReceived(self, line):
916 irc.IRCClient.lineReceived(self, line)
918 irc.IRCClient.signedOn(self)
919 self.factory.resetDelay()
920 self.join(self.channel)
921 self.watch_id = tracker.verified.added.watch(self._new_share)
922 self.announced_hashes = set()
923 self.delayed_messages = {}
924 def privmsg(self, user, channel, message):
925 if channel == self.channel and message in self.delayed_messages:
926 self.delayed_messages.pop(message).cancel()
927 def _new_share(self, share):
928 if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes and abs(share.timestamp - time.time()) < 10*60:
929 self.announced_hashes.add(share.header_hash)
930 message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
931 self.delayed_messages[message] = reactor.callLater(random.expovariate(1/5), lambda: (self.say(self.channel, message), self.delayed_messages.pop(message)))
932 def connectionLost(self, reason):
933 tracker.verified.added.unwatch(self.watch_id)
934 print 'IRC connection lost:', reason.getErrorMessage()
935 class IRCClientFactory(protocol.ReconnectingClientFactory):
937 reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
939 @defer.inlineCallbacks
944 yield deferral.sleep(3)
946 if time.time() > current_work2.value['last_update'] + 60:
947 print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - current_work2.value['last_update']),)
949 height = tracker.get_height(current_work.value['best_share_hash'])
950 this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
952 len(tracker.verified.shares),
955 sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
956 ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
958 datums, dt = local_rate_monitor.get_datums_in_last()
959 my_att_s = sum(datum['work']/dt for datum in datums)
960 this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
961 math.format(int(my_att_s)),
963 math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
964 math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].target / my_att_s) if my_att_s and current_work.value['best_share_hash'] else '???',
968 (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
969 stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
970 real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
972 this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
973 shares, stale_orphan_shares, stale_doa_shares,
974 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
975 math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
976 get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
978 this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
979 math.format(int(real_att_s)),
981 math.format_dt(2**256 / current_work.value['bits'].target / real_att_s),
984 if this_str != last_str or time.time() > last_time + 15:
987 last_time = time.time()
992 log.err(None, 'Fatal error:')
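# Argument parser that expands @file arguments by splicing in the referenced file's
# contents (recursively), so long option sets can live in a config file.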
996 class FixedArgumentParser(argparse.ArgumentParser):
997 def _read_args_from_files(self, arg_strings):
998 # expand arguments referencing files
1000 for arg_string in arg_strings:
1002 # for regular arguments, just add them back into the list
1003 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
1004 new_arg_strings.append(arg_string)
1006 # replace arguments referencing files with the file content
1009 args_file = open(arg_string[1:])
1012 for arg_line in args_file.read().splitlines():
1013 for arg in self.convert_arg_line_to_args(arg_line):
1014 arg_strings.append(arg)
1015 arg_strings = self._read_args_from_files(arg_strings)
1016 new_arg_strings.extend(arg_strings)
1020 err = sys.exc_info()[1]
1021 self.error(str(err))
1023 # return the modified argument list
1024 return new_arg_strings
1026 def convert_arg_line_to_args(self, arg_line):
1027 return [arg for arg in arg_line.split() if arg.strip()]
1030 realnets=dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
1032 parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
1033 parser.add_argument('--version', action='version', version=p2pool.__version__)
1034 parser.add_argument('--net',
1035 help='use specified network (default: bitcoin)',
1036 action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
1037 parser.add_argument('--testnet',
1038 help='''use the network's testnet''',
1039 action='store_const', const=True, default=False, dest='testnet')
1040 parser.add_argument('--debug',
1041 help='enable debugging mode',
1042 action='store_const', const=True, default=False, dest='debug')
1043 parser.add_argument('-a', '--address',
1044 help='generate payouts to this address (default: <address requested from bitcoind>)',
1045 type=str, action='store', default=None, dest='address')
1046 parser.add_argument('--datadir',
1047 help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
1048 type=str, action='store', default=None, dest='datadir')
1049 parser.add_argument('--logfile',
1050 help='''log to this file (default: data/<NET>/log)''',
1051 type=str, action='store', default=None, dest='logfile')
1052 parser.add_argument('--merged',
1053 help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
1054 type=str, action='append', default=[], dest='merged_urls')
1055 parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
1056 help='donate this percentage of work to author of p2pool (default: 0.5)',
1057 type=float, action='store', default=0.5, dest='donation_percentage')
1058 parser.add_argument('--irc-announce',
1059 help='announce any blocks found on irc://irc.freenode.net/#p2pool',
1060 action='store_true', default=False, dest='irc_announce')
1062 p2pool_group = parser.add_argument_group('p2pool interface')
1063 p2pool_group.add_argument('--p2pool-port', metavar='PORT',
1064 help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
1065 type=int, action='store', default=None, dest='p2pool_port')
1066 p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
1067 help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
1068 type=str, action='append', default=[], dest='p2pool_nodes')
1069 parser.add_argument('--disable-upnp',
1070 help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
1071 action='store_false', default=True, dest='upnp')
1073 worker_group = parser.add_argument_group('worker interface')
1074 worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
1075 help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
1076 type=str, action='store', default=None, dest='worker_endpoint')
1077 worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
1078 help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
1079 type=float, action='store', default=0, dest='worker_fee')
1081 bitcoind_group = parser.add_argument_group('bitcoind interface')
1082 bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
1083 help='connect to this address (default: 127.0.0.1)',
1084 type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
1085 bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
1086 help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
1087 type=int, action='store', default=None, dest='bitcoind_rpc_port')
1088 bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
1089 help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
1090 type=int, action='store', default=None, dest='bitcoind_p2p_port')
1092 bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
1093 help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
1094 type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
1096 args = parser.parse_args()
1101 net_name = args.net_name + ('_testnet' if args.testnet else '')
1102 net = networks.nets[net_name]
1104 datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
1105 if not os.path.exists(datadir_path):
1106 os.makedirs(datadir_path)
1108 if len(args.bitcoind_rpc_userpass) > 2:
1109 parser.error('a maximum of two arguments are allowed')
1110 args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
1112 if args.bitcoind_rpc_password is None:
1113 if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
1114 parser.error('This network has no configuration file function. Manually enter your RPC password.')
1115 conf_path = net.PARENT.CONF_FILE_FUNC()
1116 if not os.path.exists(conf_path):
1117 parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
1118 '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
1121 '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
1122 with open(conf_path, 'rb') as f:
1123 cp = ConfigParser.RawConfigParser()
1124 cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
1125 for conf_name, var_name, var_type in [
1126 ('rpcuser', 'bitcoind_rpc_username', str),
1127 ('rpcpassword', 'bitcoind_rpc_password', str),
1128 ('rpcport', 'bitcoind_rpc_port', int),
1129 ('port', 'bitcoind_p2p_port', int),
1131 if getattr(args, var_name) is None and cp.has_option('x', conf_name):
1132 setattr(args, var_name, var_type(cp.get('x', conf_name)))
1134 if args.bitcoind_rpc_username is None:
1135 args.bitcoind_rpc_username = ''
1137 if args.bitcoind_rpc_port is None:
1138 args.bitcoind_rpc_port = net.PARENT.RPC_PORT
1140 if args.bitcoind_p2p_port is None:
1141 args.bitcoind_p2p_port = net.PARENT.P2P_PORT
1143 if args.p2pool_port is None:
1144 args.p2pool_port = net.P2P_PORT
1146 if args.worker_endpoint is None:
1147 worker_endpoint = '', net.WORKER_PORT
1148 elif ':' not in args.worker_endpoint:
1149 worker_endpoint = '', int(args.worker_endpoint)
1151 addr, port = args.worker_endpoint.rsplit(':', 1)
1152 worker_endpoint = addr, int(port)
1154 if args.address is not None:
1156 args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
1157 except Exception, e:
1158 parser.error('error parsing address: ' + repr(e))
1160 args.pubkey_hash = None
1162 def separate_url(url):
1163 s = urlparse.urlsplit(url)
1164 if '@' not in s.netloc:
1165 parser.error('merged url netloc must contain an "@"')
1166 userpass, new_netloc = s.netloc.rsplit('@', 1)
1167 return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
1168 merged_urls = map(separate_url, args.merged_urls)
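# Route stdout/stderr through a timestamping pipe that tees to the console and the
# logfile; SIGUSR1 reopens the logfile, presumably so it can be rotated externally.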
1170 if args.logfile is None:
1171 args.logfile = os.path.join(datadir_path, 'log')
1173 logfile = logging.LogFile(args.logfile)
1174 pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
1175 sys.stdout = logging.AbortPipe(pipe)
1176 sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
1177 if hasattr(signal, "SIGUSR1"):
1178 def sigusr1(signum, frame):
1179 print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
1181 print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
1182 signal.signal(signal.SIGUSR1, sigusr1)
1183 task.LoopingCall(logfile.reopen).start(5)
1185 reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)