from __future__ import division
-import ConfigParser
-import StringIO
-import argparse
import base64
+import json
import os
import random
import sys
import traceback
import urlparse
+if '--iocp' in sys.argv:
+ from twisted.internet import iocpreactor
+ iocpreactor.install()
from twisted.internet import defer, reactor, protocol, task
from twisted.web import server
from twisted.python import log
from nattraverso import portmapper, ipdiscover
-import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
+import bitcoin.p2p as bitcoin_p2p, bitcoin.data as bitcoin_data
from bitcoin import worker_interface, height_tracker
-from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
-from . import p2p, networks, web
+from util import fixargparse, jsonrpc, variable, deferral, math, logging
+from . import p2p, networks, web, work
import p2pool, p2pool.data as p2pool_data
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
-def getwork(bitcoind):
+def getwork(bitcoind, use_getblocktemplate=False):
+ def go():
+ if use_getblocktemplate:
+ return bitcoind.rpc_getblocktemplate(dict(mode='template'))
+ else:
+ return bitcoind.rpc_getmemorypool()
try:
- work = yield bitcoind.rpc_getmemorypool()
- except jsonrpc.Error, e:
- if e.code == -32601: # Method not found
+ work = yield go()
+ except jsonrpc.Error_for_code(-32601): # Method not found
+ use_getblocktemplate = not use_getblocktemplate
+ try:
+ work = yield go()
+ except jsonrpc.Error_for_code(-32601): # Method not found
print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
raise deferral.RetrySilentlyException()
- raise
- packed_transactions = [x.decode('hex') for x in work['transactions']]
+ packed_transactions = [(x['data'] if isinstance(x, dict) else x).decode('hex') for x in work['transactions']]
+ if 'height' not in work:
+ work['height'] = (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1
+ elif p2pool.DEBUG:
+ assert work['height'] == (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1
defer.returnValue(dict(
version=work['version'],
- previous_block_hash=int(work['previousblockhash'], 16),
+ previous_block=int(work['previousblockhash'], 16),
transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
- merkle_link=bitcoin_data.calculate_merkle_link([0] + map(bitcoin_data.hash256, packed_transactions), 0), # using 0 is a bit of a hack, but will always work when index=0
subsidy=work['coinbasevalue'],
- time=work['time'],
+ time=work['time'] if 'time' in work else work['curtime'],
bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
+ height=work['height'],
+ last_update=time.time(),
+ use_getblocktemplate=use_getblocktemplate,
))
@defer.inlineCallbacks
print 'p2pool (version %s)' % (p2pool.__version__,)
print
+ traffic_happened = variable.Event()
+
+ @defer.inlineCallbacks
+ def connect_p2p():
+ # connect to bitcoind over bitcoin-p2p
+ print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
+ factory = bitcoin_p2p.ClientFactory(net.PARENT)
+ reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
+ yield factory.getProtocol() # waits until handshake is successful
+ print ' ...success!'
+ print
+ defer.returnValue(factory)
+
+ if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
+ factory = yield connect_p2p()
+
# connect to bitcoind over JSON-RPC and do initial getmemorypool
- url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
+ url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
bitcoind = jsonrpc.Proxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
@deferral.retry('Error while checking Bitcoin connection:', 1)
@defer.inlineCallbacks
def check():
- if not (yield net.PARENT.RPC_CHECK)(bitcoind):
+ if not (yield net.PARENT.RPC_CHECK(bitcoind)):
print >>sys.stderr, " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
raise deferral.RetrySilentlyException()
- temp_work = yield getwork(bitcoind)
- if not net.VERSION_CHECK((yield bitcoind.rpc_getinfo())['version'], temp_work):
- print >>sys.stderr, ' Bitcoin version too old! BIP16 support required! Upgrade to 0.6.0rc4 or greater!'
+ if not net.VERSION_CHECK((yield bitcoind.rpc_getinfo())['version']):
+ print >>sys.stderr, ' Bitcoin version too old! Upgrade to 0.6.4 or newer!'
raise deferral.RetrySilentlyException()
- defer.returnValue(temp_work)
- temp_work = yield check()
- print ' ...success!'
- print ' Current block hash: %x' % (temp_work['previous_block_hash'],)
- print
+ yield check()
+ temp_work = yield getwork(bitcoind)
+
+ if not args.testnet:
+ factory = yield connect_p2p()
+
+ block_height_var = variable.Variable(None)
+ @defer.inlineCallbacks
+ def poll_height():
+ block_height_var.set((yield deferral.retry('Error while calling getblockcount:')(bitcoind.rpc_getblockcount)()))
+ yield poll_height()
+ task.LoopingCall(poll_height).start(60*60)
+
+ bitcoind_warning_var = variable.Variable(None)
+ @defer.inlineCallbacks
+ def poll_warnings():
+ errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
+ bitcoind_warning_var.set(errors if errors != '' else None)
+ yield poll_warnings()
+ task.LoopingCall(poll_warnings).start(20*60)
- # connect to bitcoind over bitcoin-p2p
- print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
- factory = bitcoin_p2p.ClientFactory(net.PARENT)
- reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
- yield factory.getProtocol() # waits until handshake is successful
print ' ...success!'
+ print ' Current block hash: %x' % (temp_work['previous_block'],)
+ print ' Current block height: %i' % (block_height_var.value,)
print
print 'Determining payout address...'
shared_share_hashes = set()
ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
known_verified = set()
- recent_blocks = []
print "Loading shares..."
for i, (mode, contents) in enumerate(ss.get_shares()):
if mode == 'share':
- if contents.hash in tracker.shares:
+ if contents.hash in tracker.items:
continue
shared_share_hashes.add(contents.hash)
contents.time_seen = 0
tracker.add(contents)
- if len(tracker.shares) % 1000 == 0 and tracker.shares:
- print " %i" % (len(tracker.shares),)
+ if len(tracker.items) % 1000 == 0 and tracker.items:
+ print " %i" % (len(tracker.items),)
elif mode == 'verified_hash':
known_verified.add(contents)
else:
raise AssertionError()
print " ...inserting %i verified shares..." % (len(known_verified),)
for h in known_verified:
- if h not in tracker.shares:
+ if h not in tracker.items:
ss.forget_verified_share(h)
continue
- tracker.verified.add(tracker.shares[h])
- print " ...done loading %i shares!" % (len(tracker.shares),)
+ tracker.verified.add(tracker.items[h])
+ print " ...done loading %i shares!" % (len(tracker.items),)
print
tracker.removed.watch(lambda share: ss.forget_share(share.hash))
tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
- peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
+ print 'Initializing work...'
- pre_current_work = variable.Variable(None)
- pre_merged_work = variable.Variable({})
- # information affecting work that should trigger a long-polling update
- current_work = variable.Variable(None)
- # information affecting work that should not trigger a long-polling update
- current_work2 = variable.Variable(None)
- requested = expiring_dict.ExpiringDict(300)
+ # BITCOIND WORK
- print 'Initializing work...'
+ bitcoind_work = variable.Variable((yield getwork(bitcoind)))
@defer.inlineCallbacks
- def set_real_work1():
- work = yield getwork(bitcoind)
- current_work2.set(dict(
- time=work['time'],
- transactions=work['transactions'],
- merkle_link=work['merkle_link'],
- subsidy=work['subsidy'],
- clock_offset=time.time() - work['time'],
- last_update=time.time(),
- )) # second set first because everything hooks on the first
- pre_current_work.set(dict(
- version=work['version'],
- previous_block=work['previous_block_hash'],
- bits=work['bits'],
- coinbaseflags=work['coinbaseflags'],
- ))
- yield set_real_work1()
+ def work_poller():
+ while True:
+ flag = factory.new_block.get_deferred()
+ try:
+ bitcoind_work.set((yield getwork(bitcoind, bitcoind_work.value['use_getblocktemplate'])))
+ except:
+ log.err()
+ yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
+ work_poller()
- get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, lambda: pre_current_work.value['previous_block'], net)
+ # PEER WORK
+
+ best_block_header = variable.Variable(None)
+ def handle_header(new_header):
+ # check that header matches current target
+ if not (net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header)) <= bitcoind_work.value['bits'].target):
+ return
+ bitcoind_best_block = bitcoind_work.value['previous_block']
+ if (best_block_header.value is None
+ or (
+ new_header['previous_block'] == bitcoind_best_block and
+ bitcoin_data.hash256(bitcoin_data.block_header_type.pack(best_block_header.value)) == bitcoind_best_block
+ ) # new is child of current and previous is current
+ or (
+ bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and
+ best_block_header.value['previous_block'] != bitcoind_best_block
+ )): # new is current and previous is not a child of current
+ best_block_header.set(new_header)
+ @defer.inlineCallbacks
+ def poll_header():
+ handle_header((yield factory.conn.value.get_block_header(bitcoind_work.value['previous_block'])))
+ bitcoind_work.changed.watch(lambda _: poll_header())
+ yield deferral.retry('Error while requesting best block header:')(poll_header)()
- def set_real_work2():
- best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
-
- t = dict(pre_current_work.value)
- t['best_share_hash'] = best
- t['mm_chains'] = pre_merged_work.value
- current_work.set(t)
+ # BEST SHARE
+
+ known_txs_var = variable.Variable({}) # hash -> tx
+ mining_txs_var = variable.Variable({}) # hash -> tx
+ get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, lambda: bitcoind_work.value['previous_block'], net)
+
+ best_share_var = variable.Variable(None)
+ desired_var = variable.Variable(None)
+ def set_best_share():
+ best, desired = tracker.think(get_height_rel_highest, bitcoind_work.value['previous_block'], bitcoind_work.value['bits'], known_txs_var.value)
- t = time.time()
- for peer2, share_hash in desired:
- if share_hash not in tracker.tails: # was received in the time tracker.think was running
- continue
- last_request_time, count = requested.get(share_hash, (None, 0))
- if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
- continue
- potential_peers = set()
- for head in tracker.tails[share_hash]:
- potential_peers.update(peer_heads.get(head, set()))
- potential_peers = [peer for peer in potential_peers if peer.connected2]
- if count == 0 and peer2 is not None and peer2.connected2:
- peer = peer2
- else:
- peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
- if peer is None:
- continue
-
- print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
- peer.send_getshares(
- hashes=[share_hash],
- parents=2000,
- stops=list(set(tracker.heads) | set(
- tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
- ))[:100],
- )
- requested[share_hash] = t, count + 1
- pre_current_work.changed.watch(lambda _: set_real_work2())
- pre_merged_work.changed.watch(lambda _: set_real_work2())
- set_real_work2()
+ best_share_var.set(best)
+ desired_var.set(desired)
+ bitcoind_work.changed.watch(lambda _: set_best_share())
+ set_best_share()
+
print ' ...success!'
print
-
- @defer.inlineCallbacks
- def set_merged_work(merged_url, merged_userpass):
- merged_proxy = jsonrpc.Proxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
- while True:
- auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
- pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
- hash=int(auxblock['hash'], 16),
- target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
- merged_proxy=merged_proxy,
- )}))
- yield deferral.sleep(1)
- for merged_url, merged_userpass in merged_urls:
- set_merged_work(merged_url, merged_userpass)
-
- @pre_merged_work.changed.watch
- def _(new_merged_work):
- print 'Got new merged mining work!'
-
# setup p2p logic and join p2pool network
+ # update mining_txs according to getwork results
+ @bitcoind_work.changed.run_and_watch
+ def _(_=None):
+ new_mining_txs = {}
+ new_known_txs = dict(known_txs_var.value)
+ for tx in bitcoind_work.value['transactions']:
+ tx_hash = bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))
+ new_mining_txs[tx_hash] = tx
+ new_known_txs[tx_hash] = tx
+ mining_txs_var.set(new_mining_txs)
+ known_txs_var.set(new_known_txs)
+ # forward transactions seen to bitcoind
+ @known_txs_var.transitioned.watch
+ def _(before, after):
+ for tx_hash in set(after) - set(before):
+ factory.conn.value.send_tx(tx=after[tx_hash])
+
class Node(p2p.Node):
def handle_shares(self, shares, peer):
if len(shares) > 5:
new_count = 0
for share in shares:
- if share.hash in tracker.shares:
+ if share.hash in tracker.items:
#print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
continue
tracker.add(share)
- if shares and peer is not None:
- peer_heads.setdefault(shares[0].hash, set()).add(peer)
-
if new_count:
- set_real_work2()
+ set_best_share()
if len(shares) > 5:
- print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
+ print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.items), 2*net.CHAIN_LENGTH)
+ @defer.inlineCallbacks
def handle_share_hashes(self, hashes, peer):
- t = time.time()
- get_hashes = []
- for share_hash in hashes:
- if share_hash in tracker.shares:
- continue
- last_request_time, count = requested.get(share_hash, (None, 0))
- if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
- continue
- print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
- get_hashes.append(share_hash)
- requested[share_hash] = t, count + 1
-
- if hashes and peer is not None:
- peer_heads.setdefault(hashes[0], set()).add(peer)
- if get_hashes:
- peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
+ new_hashes = [x for x in hashes if x not in tracker.items]
+ if not new_hashes:
+ return
+ try:
+ shares = yield peer.get_shares(
+ hashes=new_hashes,
+ parents=0,
+ stops=[],
+ )
+ except:
+ log.err(None, 'in handle_share_hashes:')
+ else:
+ self.handle_shares(shares, peer)
def handle_get_shares(self, hashes, parents, stops, peer):
parents = min(parents, 1000//len(hashes))
shares.append(share)
print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
return shares
+
+ def handle_bestblock(self, header, peer):
+ if net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) > header['bits'].target:
+ raise p2p.PeerMisbehavingError('received block header fails PoW test')
+ handle_header(header)
+
+ @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
+ def submit_block_p2p(block):
+ if factory.conn.value is None:
+ print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header'])))
+ raise deferral.RetrySilentlyException()
+ factory.conn.value.send_block(block=block)
@deferral.retry('Error submitting block: (will retry)', 10, 10)
@defer.inlineCallbacks
- def submit_block(block, ignore_failure):
- success = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex'))
+ def submit_block_rpc(block, ignore_failure):
+ if bitcoind_work.value['use_getblocktemplate']:
+ result = yield bitcoind.rpc_submitblock(bitcoin_data.block_type.pack(block).encode('hex'))
+ success = result is None
+ else:
+ result = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex'))
+ success = result
success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target
if (not success and success_expected and not ignore_failure) or (success and not success_expected):
- print >>sys.stderr, 'Block submittal result: %s Expected: %s' % (result, expected_result)
+ print >>sys.stderr, 'Block submittal result: %s (%r) Expected: %s' % (success, result, success_expected)
+
+ def submit_block(block, ignore_failure):
+ submit_block_p2p(block)
+ submit_block_rpc(block, ignore_failure)
@tracker.verified.added.watch
def _(share):
if share.pow_hash <= share.header['bits'].target:
- submit_block(share.as_block(tracker), ignore_failure=True)
+ block = share.as_block(tracker, known_txs_var.value)
+ if block is None:
+ print >>sys.stderr, 'GOT INCOMPLETE BLOCK FROM PEER! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
+ return
+ submit_block(block, ignore_failure=True)
print
print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
print
- recent_blocks.append(dict(ts=share.timestamp, hash='%064x' % (share.header_hash,)))
+ def spread():
+ if (get_height_rel_highest(share.header['previous_block']) > -5 or
+ bitcoind_work.value['previous_block'] in [share.header['previous_block'], share.header_hash]):
+ broadcast_share(share.hash)
+ spread()
+ reactor.callLater(5, spread) # so get_height_rel_highest can update
print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
addrs = {}
- if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
+ if os.path.exists(os.path.join(datadir_path, 'addrs')):
try:
- addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
+ with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
+ addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
except:
- print >>sys.stderr, "error reading addrs"
+ print >>sys.stderr, 'error parsing addrs'
for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
try:
addr = yield addr_df
log.err()
p2p_node = Node(
- best_share_hash_func=lambda: current_work.value['best_share_hash'],
+ best_share_hash_func=lambda: best_share_var.value,
port=args.p2pool_port,
net=net,
addr_store=addrs,
connect_addrs=connect_addrs,
max_incoming_conns=args.p2pool_conns,
+ traffic_happened=traffic_happened,
+ known_txs_var=known_txs_var,
+ mining_txs_var=mining_txs_var,
)
p2p_node.start()
- task.LoopingCall(lambda: open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())).start(60)
+ def save_addrs():
+ with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
+ f.write(json.dumps(p2p_node.addr_store.items()))
+ task.LoopingCall(save_addrs).start(60)
- # send share when the chain changes to their chain
- def work_changed(new_work):
- #print 'Work changed:', new_work
+ @best_block_header.changed.watch
+ def _(header):
+ for peer in p2p_node.peers.itervalues():
+ peer.send_bestblock(header=header)
+
+ @defer.inlineCallbacks
+ def broadcast_share(share_hash):
shares = []
- for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
+ for share in tracker.get_chain(share_hash, min(5, tracker.get_height(share_hash))):
if share.hash in shared_share_hashes:
break
shared_share_hashes.add(share.hash)
shares.append(share)
- for peer in p2p_node.peers.itervalues():
- peer.sendShares([share for share in shares if share.peer is not peer])
+ for peer in list(p2p_node.peers.itervalues()):
+ yield peer.sendShares([share for share in shares if share.peer is not peer], tracker, known_txs_var.value, include_txs_with=[share_hash])
- current_work.changed.watch(work_changed)
+ # send share when the chain changes to their chain
+ best_share_var.changed.watch(broadcast_share)
def save_shares():
- for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
+ for share in tracker.get_chain(best_share_var.value, min(tracker.get_height(best_share_var.value), 2*net.CHAIN_LENGTH)):
ss.add_share(share)
- if share.hash in tracker.verified.shares:
+ if share.hash in tracker.verified.items:
ss.add_verified_hash(share.hash)
task.LoopingCall(save_shares).start(60)
+ @apply
+ @defer.inlineCallbacks
+ def download_shares():
+ while True:
+ desired = yield desired_var.get_when_satisfies(lambda val: len(val) != 0)
+ peer2, share_hash = random.choice(desired)
+
+ if len(p2p_node.peers) == 0:
+ yield deferral.sleep(1)
+ continue
+ peer = random.choice(p2p_node.peers.values())
+
+ print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
+ try:
+ shares = yield peer.get_shares(
+ hashes=[share_hash],
+ parents=500,
+ stops=[],
+ )
+ except:
+ log.err(None, 'in download_shares:')
+ continue
+
+ if not shares:
+ yield deferral.sleep(1) # sleep so we don't keep rerequesting the same share nobody has
+ continue
+ p2p_node.handle_shares(shares, peer)
+
print ' ...success!'
print
print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
- if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
- with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
- vip_pass = f.read().strip('\r\n')
- else:
- vip_pass = '%016x' % (random.randrange(2**64),)
- with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
- f.write(vip_pass)
+ get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, best_share_var.value, bitcoind_work.value['bits'].target, bitcoind_work.value['subsidy'], net)
- # setup worker logic
-
- removed_unstales_var = variable.Variable((0, 0, 0))
- removed_doa_unstales_var = variable.Variable(0)
- @tracker.verified.removed.watch
- def _(share):
- if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
- assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
- removed_unstales_var.set((
- removed_unstales_var.value[0] + 1,
- removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
- removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
- ))
- if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
- removed_doa_unstales.set(removed_doa_unstales.value + 1)
-
- def get_stale_counts():
- '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
- my_shares = len(my_share_hashes)
- my_doa_shares = len(my_doa_share_hashes)
- delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
- my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
- my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
- orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
- doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
-
- my_shares_not_in_chain = my_shares - my_shares_in_chain
- my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
-
- return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
-
-
- pseudoshare_received = variable.Event()
- share_received = variable.Event()
- local_rate_monitor = math.RateMonitor(10*60)
-
- class WorkerBridge(worker_interface.WorkerBridge):
- def __init__(self):
- worker_interface.WorkerBridge.__init__(self)
- self.new_work_event = current_work.changed
- self.recent_shares_ts_work = []
-
- def preprocess_request(self, request):
- user = request.getUser() if request.getUser() is not None else ''
-
- desired_pseudoshare_target = None
- if '+' in user:
- user, desired_pseudoshare_difficulty_str = user.rsplit('+', 1)
- try:
- desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(desired_pseudoshare_difficulty_str))
- except:
- pass
-
- desired_share_target = 2**256 - 1
- if '/' in user:
- user, min_diff_str = user.rsplit('/', 1)
- try:
- desired_share_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
- except:
- pass
-
- if random.uniform(0, 100) < args.worker_fee:
- pubkey_hash = my_pubkey_hash
- else:
- try:
- pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
- except: # XXX blah
- pubkey_hash = my_pubkey_hash
-
- return pubkey_hash, desired_share_target, desired_pseudoshare_target
-
- def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
- if len(p2p_node.peers) == 0 and net.PERSIST:
- raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
- if current_work.value['best_share_hash'] is None and net.PERSIST:
- raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
- if time.time() > current_work2.value['last_update'] + 60:
- raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
-
- if current_work.value['mm_chains']:
- tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
- mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
- mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
- merkle_root=bitcoin_data.merkle_hash(mm_hashes),
- size=size,
- nonce=0,
- ))
- mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
- else:
- mm_data = ''
- mm_later = []
-
- if True:
- share_info, generate_tx = p2pool_data.Share.generate_transaction(
- tracker=tracker,
- share_data=dict(
- previous_share_hash=current_work.value['best_share_hash'],
- coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
- nonce=random.randrange(2**32),
- pubkey_hash=pubkey_hash,
- subsidy=current_work2.value['subsidy'],
- donation=math.perfect_round(65535*args.donation_percentage/100),
- stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
- 253 if orphans > orphans_recorded_in_chain else
- 254 if doas > doas_recorded_in_chain else
- 0
- )(*get_stale_counts()),
- desired_version=1,
- ),
- block_target=current_work.value['bits'].target,
- desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
- desired_target=desired_share_target,
- ref_merkle_link=dict(branch=[], index=0),
- net=net,
- )
-
- target = net.PARENT.SANE_MAX_TARGET
- if desired_pseudoshare_target is None:
- if len(self.recent_shares_ts_work) == 50:
- hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
- target = min(target, 2**256//hash_rate)
- else:
- target = min(target, desired_pseudoshare_target)
- target = max(target, share_info['bits'].target)
- for aux_work in current_work.value['mm_chains'].itervalues():
- target = max(target, aux_work['target'])
-
- transactions = [generate_tx] + list(current_work2.value['transactions'])
- packed_generate_tx = bitcoin_data.tx_type.pack(generate_tx)
- merkle_root = bitcoin_data.check_merkle_link(bitcoin_data.hash256(packed_generate_tx), current_work2.value['merkle_link'])
-
- getwork_time = time.time()
- merkle_link = current_work2.value['merkle_link']
-
- print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
- bitcoin_data.target_to_difficulty(target),
- bitcoin_data.target_to_difficulty(share_info['bits'].target),
- current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
- len(current_work2.value['transactions']),
- )
-
- ba = bitcoin_getwork.BlockAttempt(
- version=current_work.value['version'],
- previous_block=current_work.value['previous_block'],
- merkle_root=merkle_root,
- timestamp=current_work2.value['time'],
- bits=current_work.value['bits'],
- share_target=target,
- )
-
- received_header_hashes = set()
-
- def got_response(header, request):
- assert header['merkle_root'] == merkle_root
-
- header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
- pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
- on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
-
- try:
- if pow_hash <= header['bits'].target or p2pool.DEBUG:
- submit_block(dict(header=header, txs=transactions), ignore_failure=False)
- if pow_hash <= header['bits'].target:
- print
- print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
- print
- recent_blocks.append(dict(ts=time.time(), hash='%064x' % (header_hash,)))
- except:
- log.err(None, 'Error while processing potential block:')
-
- for aux_work, index, hashes in mm_later:
- try:
- if pow_hash <= aux_work['target'] or p2pool.DEBUG:
- df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
- pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
- bitcoin_data.aux_pow_type.pack(dict(
- merkle_tx=dict(
- tx=transactions[0],
- block_hash=header_hash,
- merkle_link=merkle_link,
- ),
- merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
- parent_block_header=header,
- )).encode('hex'),
- )
- @df.addCallback
- def _(result):
- if result != (pow_hash <= aux_work['target']):
- print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
- else:
- print 'Merged block submittal result: %s' % (result,)
- @df.addErrback
- def _(err):
- log.err(err, 'Error submitting merged block:')
- except:
- log.err(None, 'Error while processing merged mining POW:')
-
- if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
- min_header = dict(header);del min_header['merkle_root']
- hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.Share.gentx_before_refhash)
- share = p2pool_data.Share(net, None, dict(
- min_header=min_header, share_info=share_info, hash_link=hash_link,
- ref_merkle_link=dict(branch=[], index=0),
- ), merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
-
- print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
- request.getUser(),
- p2pool_data.format_hash(share.hash),
- p2pool_data.format_hash(share.previous_hash),
- time.time() - getwork_time,
- ' DEAD ON ARRIVAL' if not on_time else '',
- )
- my_share_hashes.add(share.hash)
- if not on_time:
- my_doa_share_hashes.add(share.hash)
-
- tracker.add(share)
- if not p2pool.DEBUG:
- tracker.verified.add(share)
- set_real_work2()
-
- try:
- if pow_hash <= header['bits'].target or p2pool.DEBUG:
- for peer in p2p_node.peers.itervalues():
- peer.sendShares([share])
- shared_share_hashes.add(share.hash)
- except:
- log.err(None, 'Error forwarding block solution:')
-
- share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time)
-
- if pow_hash > target:
- print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
- print ' Hash: %56x' % (pow_hash,)
- print ' Target: %56x' % (target,)
- elif header_hash in received_header_hashes:
- print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
- else:
- received_header_hashes.add(header_hash)
-
- pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, request.getUser(), request.getPassword() == vip_pass)
- self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
- while len(self.recent_shares_ts_work) > 50:
- self.recent_shares_ts_work.pop(0)
- local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
-
- return on_time
-
- return ba, got_response
-
- get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, current_work.value['best_share_hash'], current_work.value['bits'].target, current_work2.value['subsidy'], net)
-
- web_root = web.get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, args.worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received, share_received)
- worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
+ wb = work.WorkerBridge(my_pubkey_hash, net, args.donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, args.worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var)
+ web_root = web.get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net, wb.get_stale_counts, my_pubkey_hash, wb.local_rate_monitor, args.worker_fee, p2p_node, wb.my_share_hashes, wb.pseudoshare_received, wb.share_received, best_share_var, bitcoind_warning_var, traffic_happened)
+ worker_interface.WorkerInterface(wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
print
- @defer.inlineCallbacks
- def work_poller():
- while True:
- flag = factory.new_block.get_deferred()
- try:
- yield set_real_work1()
- except:
- log.err()
- yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
- work_poller()
-
-
# done!
print 'Started successfully!'
print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
nickname = 'p2pool%02i' % (random.randrange(100),)
channel = net.ANNOUNCE_CHANNEL
def lineReceived(self, line):
- print repr(line)
+ # Dump raw IRC protocol traffic only when debugging, so normal
+ # operation doesn't flood the log with every received line.
+ if p2pool.DEBUG:
+ print repr(line)
irc.IRCClient.lineReceived(self, line)
def signedOn(self):
irc.IRCClient.signedOn(self)
if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
yield deferral.sleep(random.expovariate(1/60))
message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
- if message not in self.recent_messages:
+ if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
self.say(self.channel, message)
self._remember_message(message)
self.watch_id = tracker.verified.added.watch(new_share)
while True:
yield deferral.sleep(3)
try:
- if time.time() > current_work2.value['last_update'] + 60:
- print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - current_work2.value['last_update']),)
-
- height = tracker.get_height(current_work.value['best_share_hash'])
+ height = tracker.get_height(best_share_var.value)
this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
height,
- len(tracker.verified.shares),
- len(tracker.shares),
+ len(tracker.verified.items),
+ len(tracker.items),
len(p2p_node.peers),
sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
- datums, dt = local_rate_monitor.get_datums_in_last()
+ datums, dt = wb.local_rate_monitor.get_datums_in_last()
my_att_s = sum(datum['work']/dt for datum in datums)
this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
math.format(int(my_att_s)),
math.format_dt(dt),
math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
- math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].max_target / my_att_s) if my_att_s and current_work.value['best_share_hash'] else '???',
+ math.format_dt(2**256 / tracker.items[best_share_var.value].max_target / my_att_s) if my_att_s and best_share_var.value else '???',
)
if height > 2:
- (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
- stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
- real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
+ (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
+ stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
+ real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
shares, stale_orphan_shares, stale_doa_shares,
this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
math.format(int(real_att_s)),
100*stale_prop,
- math.format_dt(2**256 / current_work.value['bits'].target / real_att_s),
+ math.format_dt(2**256 / bitcoind_work.value['bits'].target / real_att_s),
)
- desired_version_counts = p2pool_data.get_desired_version_counts(tracker, current_work.value['best_share_hash'], min(720, height))
- majority_desired_version = max(desired_version_counts, key=lambda k: desired_version_counts[k])
- if majority_desired_version not in [0, 1]:
+ for warning in p2pool_data.get_warnings(tracker, best_share_var.value, net, bitcoind_warning_var.value, bitcoind_work.value):
print >>sys.stderr, '#'*40
- print >>sys.stderr, '>>> WARNING: A MAJORITY OF SHARES CONTAIN A VOTE FOR AN UNSUPPORTED SHARE IMPLEMENTATION! (v%i with %i%% support)' % (
- majority_desired_version, 100*desired_version_counts[majority_desired_version]/sum(desired_version_counts.itervalues()))
- print >>sys.stderr, '>>> An upgrade is likely necessary. Check http://p2pool.forre.st/ for more information.'
+ print >>sys.stderr, '>>> Warning: ' + warning
print >>sys.stderr, '#'*40
if this_str != last_str or time.time() > last_time + 15:
log.err(None, 'Fatal error:')
def run():
- class FixedArgumentParser(argparse.ArgumentParser):
- def _read_args_from_files(self, arg_strings):
- # expand arguments referencing files
- new_arg_strings = []
- for arg_string in arg_strings:
-
- # for regular arguments, just add them back into the list
- if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
- new_arg_strings.append(arg_string)
-
- # replace arguments referencing files with the file content
- else:
- try:
- args_file = open(arg_string[1:])
- try:
- arg_strings = []
- for arg_line in args_file.read().splitlines():
- for arg in self.convert_arg_line_to_args(arg_line):
- arg_strings.append(arg)
- arg_strings = self._read_args_from_files(arg_strings)
- new_arg_strings.extend(arg_strings)
- finally:
- args_file.close()
- except IOError:
- err = sys.exc_info()[1]
- self.error(str(err))
-
- # return the modified argument list
- return new_arg_strings
-
- def convert_arg_line_to_args(self, arg_line):
- return [arg for arg in arg_line.split() if arg.strip()]
-
+ realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
- realnets=dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
-
- parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
+ parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
parser.add_argument('--version', action='version', version=p2pool.__version__)
parser.add_argument('--net',
help='use specified network (default: bitcoin)',
parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
help='donate this percentage of work towards the development of p2pool (default: 0.5)',
type=float, action='store', default=0.5, dest='donation_percentage')
+ parser.add_argument('--iocp',
+ help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
+ action='store_true', default=False, dest='iocp')
parser.add_argument('--irc-announce',
help='announce any blocks found on irc://irc.freenode.net/#p2pool',
action='store_true', default=False, dest='irc_announce')
+ parser.add_argument('--no-bugreport',
+ help='disable submitting caught exceptions to the author',
+ action='store_true', default=False, dest='no_bugreport')
p2pool_group = parser.add_argument_group('p2pool interface')
p2pool_group.add_argument('--p2pool-port', metavar='PORT',
bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
type=int, action='store', default=None, dest='bitcoind_rpc_port')
+ bitcoind_group.add_argument('--bitcoind-rpc-ssl',
+ help='connect to JSON-RPC interface using SSL',
+ action='store_true', default=False, dest='bitcoind_rpc_ssl')
bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
type=int, action='store', default=None, dest='bitcoind_p2p_port')
if args.debug:
p2pool.DEBUG = True
+ defer.setDebugging(True)
net_name = args.net_name + ('_testnet' if args.testnet else '')
net = networks.nets[net_name]
args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
if args.bitcoind_rpc_password is None:
- if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
- parser.error('This network has no configuration file function. Manually enter your RPC password.')
conf_path = net.PARENT.CONF_FILE_FUNC()
if not os.path.exists(conf_path):
parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
'''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
'''\r\n'''
'''server=1\r\n'''
- '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
- with open(conf_path, 'rb') as f:
- cp = ConfigParser.RawConfigParser()
- cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
- for conf_name, var_name, var_type in [
- ('rpcuser', 'bitcoind_rpc_username', str),
- ('rpcpassword', 'bitcoind_rpc_password', str),
- ('rpcport', 'bitcoind_rpc_port', int),
- ('port', 'bitcoind_p2p_port', int),
- ]:
- if getattr(args, var_name) is None and cp.has_option('x', conf_name):
- setattr(args, var_name, var_type(cp.get('x', conf_name)))
+ '''rpcpassword=%x\r\n'''
+ '''\r\n'''
+ '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
+ conf = open(conf_path, 'rb').read()
+ contents = {}
+ for line in conf.splitlines(True):
+ if '#' in line:
+ line = line[:line.index('#')]
+ if '=' not in line:
+ continue
+ k, v = line.split('=', 1)
+ contents[k.strip()] = v.strip()
+ for conf_name, var_name, var_type in [
+ ('rpcuser', 'bitcoind_rpc_username', str),
+ ('rpcpassword', 'bitcoind_rpc_password', str),
+ ('rpcport', 'bitcoind_rpc_port', int),
+ ('port', 'bitcoind_p2p_port', int),
+ ]:
+ if getattr(args, var_name) is None and conf_name in contents:
+ setattr(args, var_name, var_type(contents[conf_name]))
if args.bitcoind_rpc_password is None:
parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
signal.signal(signal.SIGUSR1, sigusr1)
task.LoopingCall(logfile.reopen).start(5)
+ class ErrorReporter(object):
+ # Twisted log observer that submits caught error events to the
+ # author's collection endpoint (opt-out via --no-bugreport),
+ # rate-limited to at most one report every 5 seconds.
+ def __init__(self):
+ # timestamp of the last submitted report; None until first send
+ self.last_sent = None
+
+ def emit(self, eventDict):
+ # only error events are reported; informational events are ignored
+ if not eventDict["isError"]:
+ return
+
+ # rate limit: drop reports arriving within 5 seconds of the last one
+ if self.last_sent is not None and time.time() < self.last_sent + 5:
+ return
+ self.last_sent = time.time()
+
+ # prefer the full failure traceback when available; otherwise join
+ # the plain log message parts
+ if 'failure' in eventDict:
+ text = ((eventDict.get('why') or 'Unhandled Error')
+ + '\n' + eventDict['failure'].getTraceback())
+ else:
+ text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
+
+ # best-effort HTTP POST; addBoth swallows any outcome so a failed
+ # bug report can never itself raise into the logging path
+ from twisted.web import client
+ client.getPage(
+ url='http://u.forre.st/p2pool_error.cgi',
+ method='POST',
+ postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
+ timeout=15,
+ ).addBoth(lambda x: None)
+ if not args.no_bugreport:
+ log.addObserver(ErrorReporter().emit)
+
reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
reactor.run()