from twisted.python import log
import bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
-from bitcoin import script, worker_interface
-from util import jsonrpc, variable, deferral, math, pack
+from bitcoin import helper, script, worker_interface
+from util import forest, jsonrpc, variable, deferral, math, pack
import p2pool, p2pool.data as p2pool_data
class WorkerBridge(worker_interface.WorkerBridge):
- def __init__(self, my_pubkey_hash, net, donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var):
+ COINBASE_NONCE_LENGTH = 4
+
+ def __init__(self, node, my_pubkey, donation_percentage, merged_urls, worker_fee):
worker_interface.WorkerBridge.__init__(self)
self.recent_shares_ts_work = []
- self.my_pubkey_hash = my_pubkey_hash
- self.net = net
+ self.node = node
+ self.my_pubkey = my_pubkey
self.donation_percentage = donation_percentage
- self.bitcoind_work = bitcoind_work
- self.best_block_header = best_block_header
- self.best_share_var = best_share_var
- self.tracker = tracker
- self.my_share_hashes = my_share_hashes
- self.my_doa_share_hashes = my_doa_share_hashes
self.worker_fee = worker_fee
- self.p2p_node = p2p_node
- self.submit_block = submit_block
- self.set_best_share = set_best_share
- self.broadcast_share = broadcast_share
- self.block_height_var = block_height_var
+ self.running = True
self.pseudoshare_received = variable.Event()
self.share_received = variable.Event()
self.local_rate_monitor = math.RateMonitor(10*60)
self.removed_unstales_var = variable.Variable((0, 0, 0))
self.removed_doa_unstales_var = variable.Variable(0)
- @tracker.verified.removed.watch
+
+ self.my_share_hashes = set()
+ self.my_doa_share_hashes = set()
+
+ self.tracker_view = forest.TrackerView(self.node.tracker, forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
+ my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
+ my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
+ my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0,
+ my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0,
+ )))
+
+        # When one of OUR verified shares falls off the tracker (pruned past the
+        # chain window), fold its stale/doa status into the removed_* counters so
+        # get_stale_counts() stays accurate after pruning.
+        @self.node.tracker.verified.removed.watch
         def _(share):
-        if share.hash in self.my_share_hashes and tracker.is_child_of(share.hash, self.best_share_var.value):
+            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
             assert share.share_data['stale_info'] in [None, 'orphan', 'doa'] # we made these shares in this instance
             self.removed_unstales_var.set((
                 self.removed_unstales_var.value[0] + 1,
                 self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
                 self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
             ))
-        if share.hash in self.my_doa_share_hashes and self.tracker.is_child_of(share.hash, self.best_share_var.value):
+            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
             self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)
# MERGED WORK
@defer.inlineCallbacks
def set_merged_work(merged_url, merged_userpass):
- merged_proxy = jsonrpc.Proxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
- while True:
+ merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
+ while self.running:
auxblock = yield deferral.retry('Error while calling merged getauxblock:', 30)(merged_proxy.rpc_getauxblock)()
self.merged_work.set(dict(self.merged_work.value, **{auxblock['chainid']: dict(
hash=int(auxblock['hash'], 16),
# COMBINE WORK
+
self.current_work = variable.Variable(None)
def compute_work():
- t = self.bitcoind_work.value
- bb = self.best_block_header.value
- if bb is not None and bb['previous_block'] == t['previous_block'] and net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
+ t = self.node.bitcoind_work.value
+ bb = self.node.best_block_header.value
+
+# subsidy = self.node.net.PARENT.SUBSIDY_FUNC(self.node.pow_bits.target)
+
+ if bb is not None and bb['previous_block'] == t['previous_block'] and self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
print 'Skipping from block %x to block %x!' % (bb['previous_block'],
- bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)))
+ self.node.net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(bb)))
t = dict(
version=bb['version'],
- previous_block=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)),
- bits=bb['bits'], # not always true
+ previous_block=self.node.net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(bb)),
+ bits=self.node.pow_bits, # not always true
coinbaseflags='',
height=t['height'] + 1,
time=bb['timestamp'] + 600, # better way?
transactions=[],
+ transaction_fees=[],
merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
- subsidy=net.PARENT.SUBSIDY_FUNC(self.block_height_var.value),
- last_update=self.bitcoind_work.value['last_update'],
+ subsidy=self.node.pow_subsidy,
+ last_update=self.node.bitcoind_work.value['last_update'],
)
self.current_work.set(t)
- self.bitcoind_work.changed.watch(lambda _: compute_work())
- self.best_block_header.changed.watch(lambda _: compute_work())
+ self.node.bitcoind_work.changed.watch(lambda _: compute_work())
+ self.node.best_block_header.changed.watch(lambda _: compute_work())
compute_work()
self.new_work_event = variable.Event()
if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
self.new_work_event.happened()
self.merged_work.changed.watch(lambda _: self.new_work_event.happened())
- self.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
+ self.node.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
+
+    def stop(self):
+        # Clears the flag polled by background loops (e.g. the merged-work
+        # getauxblock loop), causing them to exit on their next iteration.
+        self.running = False
def get_stale_counts(self):
'''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
my_shares = len(self.my_share_hashes)
my_doa_shares = len(self.my_doa_share_hashes)
- delta = self.tracker.verified.get_delta_to_last(self.best_share_var.value)
+ delta = self.tracker_view.get_delta_to_last(self.node.best_share_var.value)
my_shares_in_chain = delta.my_count + self.removed_unstales_var.value[0]
my_doa_shares_in_chain = delta.my_doa_count + self.removed_doa_unstales_var.value
orphans_recorded_in_chain = delta.my_orphan_announce_count + self.removed_unstales_var.value[1]
return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
- def get_user_details(self, request):
- user = request.getUser() if request.getUser() is not None else ''
-
+ def get_user_details(self, user):
desired_pseudoshare_target = None
if '+' in user:
user, desired_pseudoshare_difficulty_str = user.rsplit('+', 1)
except:
pass
- if random.uniform(0, 100) < self.worker_fee:
- pubkey_hash = self.my_pubkey_hash
- else:
- try:
- pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.net.PARENT)
- except: # XXX blah
- pubkey_hash = self.my_pubkey_hash
+ pubkey = self.my_pubkey
- return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
+ return user, pubkey, desired_share_target, desired_pseudoshare_target
- def preprocess_request(self, request):
- user, pubkey_hash, desired_share_target, desired_pseudoshare_target = self.get_user_details(request)
- return pubkey_hash, desired_share_target, desired_pseudoshare_target
+    def preprocess_request(self, user):
+        # worker_interface hook: resolve the submitted username into the
+        # (pubkey, share target, pseudoshare target) triple used to build work.
+        # The normalized username returned by get_user_details is discarded here.
+        user, pubkey, desired_share_target, desired_pseudoshare_target = self.get_user_details(user)
+        return pubkey, desired_share_target, desired_pseudoshare_target
- def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
- if len(self.p2p_node.peers) == 0 and self.net.PERSIST:
+ def get_work(self, pubkey, desired_share_target, desired_pseudoshare_target):
+ if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST:
raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
- if self.best_share_var.value is None and self.net.PERSIST:
+ if self.node.best_share_var.value is None and self.node.net.PERSIST:
raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
if time.time() > self.current_work.value['last_update'] + 60:
raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
mm_data = ''
mm_later = []
+ tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
+ tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
+
+ if self.node.best_share_var.value is None:
+ share_type = p2pool_data.Share
+ else:
+ previous_share = self.node.tracker.items[self.node.best_share_var.value]
+ previous_share_type = type(previous_share)
+
+ if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH:
+ share_type = previous_share_type
+ else:
+ successor_type = previous_share_type.SUCCESSOR
+
+ counts = p2pool_data.get_desired_version_counts(self.node.tracker,
+ self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10)
+ # Share -> NewShare only valid if 85% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
+ if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100:
+ share_type = successor_type
+ else:
+ share_type = previous_share_type
+
if True:
- share_info, generate_tx = p2pool_data.Share.generate_transaction(
- tracker=self.tracker,
+ subsidy = self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['bits'].target)
+
+ share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
+ tracker=self.node.tracker,
share_data=dict(
- previous_share_hash=self.best_share_var.value,
+ previous_share_hash=self.node.best_share_var.value,
coinbase=(script.create_push_script([
self.current_work.value['height'],
] + ([mm_data] if mm_data else []) + [
]) + self.current_work.value['coinbaseflags'])[:100],
nonce=random.randrange(2**32),
- pubkey_hash=pubkey_hash,
+ pubkey=pubkey,
subsidy=self.current_work.value['subsidy'],
donation=math.perfect_round(65535*self.donation_percentage/100),
stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
'doa' if doas > doas_recorded_in_chain else
None
)(*self.get_stale_counts()),
- desired_version=3,
+ desired_version=11,
),
block_target=self.current_work.value['bits'].target,
desired_timestamp=int(time.time() + 0.5),
desired_target=desired_share_target,
ref_merkle_link=dict(branch=[], index=0),
- net=self.net,
+ desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']),
+ net=self.node.net,
+ known_txs=tx_map,
+ base_subsidy=subsidy
)
+ packed_gentx = bitcoin_data.tx_type.pack(gentx)
+ other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
+
mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
if desired_pseudoshare_target is None:
target = max(target, share_info['bits'].target)
for aux_work, index, hashes in mm_later:
target = max(target, aux_work['target'])
- target = math.clip(target, self.net.PARENT.SANE_TARGET_RANGE)
-
- transactions = [generate_tx] + list(self.current_work.value['transactions'])
- packed_generate_tx = bitcoin_data.tx_type.pack(generate_tx)
- merkle_root = bitcoin_data.check_merkle_link(bitcoin_data.hash256(packed_generate_tx), self.current_work.value['merkle_link'])
+ target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)
getwork_time = time.time()
lp_count = self.new_work_event.times
- merkle_link = self.current_work.value['merkle_link']
+ merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0)
print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
bitcoin_data.target_to_difficulty(target),
bitcoin_data.target_to_difficulty(share_info['bits'].target),
- self.current_work.value['subsidy']*1e-8, self.net.PARENT.SYMBOL,
+ self.current_work.value['subsidy']*1e-6, self.node.net.PARENT.SYMBOL,
len(self.current_work.value['transactions']),
)
- bits = self.current_work.value['bits']
- previous_block = self.current_work.value['previous_block']
- ba = bitcoin_getwork.BlockAttempt(
+ ba = dict(
version=min(self.current_work.value['version'], 2),
previous_block=self.current_work.value['previous_block'],
- merkle_root=merkle_root,
- timestamp=self.current_work.value['time'],
+ merkle_link=merkle_link,
+ coinb1=packed_gentx[:-4-4],
+ coinb2=packed_gentx[-4:],
+ timestamp=gentx['timestamp'],
bits=self.current_work.value['bits'],
share_target=target,
)
received_header_hashes = set()
- def got_response(header, request):
- header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
- pow_hash = self.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
+ def got_response(header, user, coinbase_nonce):
+ assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH == 4
+ new_packed_gentx = packed_gentx[:-4-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx
+ new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx
+
+ header_hash = self.node.net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(header))
+ pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
try:
+ if header['timestamp'] > new_gentx['timestamp'] + 3600:
+ print
+ print header['timestamp'], '>', new_gentx['timestamp'] + 3600
+ print 'Coinbase timestamp is too early!'
+ print
+
+ return
+
+ if header['timestamp'] < new_gentx['timestamp']:
+ print
+ print header['timestamp'], '<', new_gentx['timestamp']
+ print 'Block header timestamp is before coinbase timestamp!'
+ print
+ return
+
if pow_hash <= header['bits'].target or p2pool.DEBUG:
- self.submit_block(dict(header=header, txs=transactions), ignore_failure=False)
+ helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions, signature=''), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net)
if pow_hash <= header['bits'].target:
print
- print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
+ print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
print
except:
log.err(None, 'Error while processing potential block:')
- user, _, _, _ = self.get_user_details(request)
- assert header['merkle_root'] == merkle_root
- assert header['previous_block'] == previous_block
- assert header['bits'] == bits
+ user, _, _, _ = self.get_user_details(user)
+ assert header['previous_block'] == ba['previous_block']
+ assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link)
+ assert header['bits'] == ba['bits']
on_time = self.new_work_event.times == lp_count
pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
bitcoin_data.aux_pow_type.pack(dict(
merkle_tx=dict(
- tx=transactions[0],
+ tx=new_gentx,
block_hash=header_hash,
merkle_link=merkle_link,
),
log.err(None, 'Error while processing merged mining POW:')
if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
- min_header = dict(header);del min_header['merkle_root']
- hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.Share.gentx_before_refhash)
- share = p2pool_data.Share(self.net, None, dict(
- min_header=min_header, share_info=share_info, hash_link=hash_link,
- ref_merkle_link=dict(branch=[], index=0),
- ), merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
+ share = get_share(header, pack.IntType(32).unpack(coinbase_nonce))
print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
- request.getUser(),
+ user,
p2pool_data.format_hash(share.hash),
p2pool_data.format_hash(share.previous_hash),
time.time() - getwork_time,
if not on_time:
self.my_doa_share_hashes.add(share.hash)
- self.tracker.add(share)
- if not p2pool.DEBUG:
- self.tracker.verified.add(share)
- self.set_best_share()
+ self.node.tracker.add(share)
+ self.node.set_best_share()
try:
- if pow_hash <= header['bits'].target or p2pool.DEBUG:
- self.broadcast_share(share.hash)
+ if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None:
+ self.node.p2p_node.broadcast_share(share.hash)
except:
log.err(None, 'Error forwarding block solution:')
self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time)
if pow_hash > target:
- print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
+ print 'Worker %s submitted share with hash > target:' % (user,)
print ' Hash: %56x' % (pow_hash,)
print ' Target: %56x' % (target,)
elif header_hash in received_header_hashes:
- print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
+ print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,)
else:
received_header_hashes.add(header_hash)