from twisted.python import log
import bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
-from bitcoin import worker_interface
+from bitcoin import script, worker_interface
from util import jsonrpc, variable, deferral, math, pack
import p2pool, p2pool.data as p2pool_data
self.current_work = variable.Variable(None)
def compute_work():
- t = dict(self.bitcoind_work.value)
-
+ t = self.bitcoind_work.value
bb = self.best_block_header.value
if bb is not None and bb['previous_block'] == t['previous_block'] and net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
print 'Skipping from block %x to block %x!' % (bb['previous_block'],
bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)))
t = dict(
+ version=bb['version'],
previous_block=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)),
bits=bb['bits'], # not always true
coinbaseflags='',
+ height=t['height'] + 1,
time=bb['timestamp'] + 600, # better way?
transactions=[],
merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
subsidy=net.PARENT.SUBSIDY_FUNC(self.block_height_var.value),
- clock_offset=self.bitcoind_work.value['clock_offset'],
last_update=self.bitcoind_work.value['last_update'],
)
self.new_work_event = variable.Event()
@self.current_work.transitioned.watch
def _(before, after):
- # trigger LP if previous_block/bits changed or transactions changed from nothing
- if any(before[x] != after[x] for x in ['previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
+ # trigger LP if version/previous_block/bits changed or transactions changed from nothing
+ if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
self.new_work_event.happened()
self.merged_work.changed.watch(lambda _: self.new_work_event.happened())
self.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
if len(self.p2p_node.peers) == 0 and self.net.PERSIST:
- raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
+ raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
if self.best_share_var.value is None and self.net.PERSIST:
- raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
+ raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
if time.time() > self.current_work.value['last_update'] + 60:
- raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
+ raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
if self.merged_work.value:
tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
mm_data = ''
mm_later = []
+ tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
+ tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
+
+ share_type = p2pool_data.NewShare
+ if self.best_share_var.value is not None:
+ previous_share = self.tracker.items[self.best_share_var.value]
+ if isinstance(previous_share, p2pool_data.Share):
+                # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] are for the new version
+ if self.tracker.get_height(previous_share.hash) < self.net.CHAIN_LENGTH:
+ share_type = p2pool_data.Share
+ else:
+ counts = p2pool_data.get_desired_version_counts(self.tracker,
+ self.tracker.get_nth_parent_hash(previous_share.hash, self.net.CHAIN_LENGTH*9//10), self.net.CHAIN_LENGTH//10)
+ if counts.get(p2pool_data.NewShare.VERSION, 0) < sum(counts.itervalues())*95//100:
+ share_type = p2pool_data.Share
+
if True:
- share_info, generate_tx = p2pool_data.Share.generate_transaction(
+ share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
tracker=self.tracker,
share_data=dict(
previous_share_hash=self.best_share_var.value,
- coinbase=(mm_data + self.current_work.value['coinbaseflags'])[:100],
+ coinbase=(script.create_push_script([
+ self.current_work.value['height'],
+ ] + ([mm_data] if mm_data else []) + [
+ ]) + self.current_work.value['coinbaseflags'])[:100],
nonce=random.randrange(2**32),
pubkey_hash=pubkey_hash,
subsidy=self.current_work.value['subsidy'],
'doa' if doas > doas_recorded_in_chain else
None
)(*self.get_stale_counts()),
- desired_version=3,
+ desired_version=p2pool_data.NewShare.VERSION,
),
block_target=self.current_work.value['bits'].target,
- desired_timestamp=int(time.time() - self.current_work.value['clock_offset']),
+ desired_timestamp=int(time.time() + 0.5),
desired_target=desired_share_target,
ref_merkle_link=dict(branch=[], index=0),
+ desired_other_transaction_hashes=tx_hashes,
net=self.net,
+ known_txs=tx_map,
)
+ transactions = [gentx] + [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
+
mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
if desired_pseudoshare_target is None:
target = max(target, aux_work['target'])
target = math.clip(target, self.net.PARENT.SANE_TARGET_RANGE)
- transactions = [generate_tx] + list(self.current_work.value['transactions'])
- packed_generate_tx = bitcoin_data.tx_type.pack(generate_tx)
- merkle_root = bitcoin_data.check_merkle_link(bitcoin_data.hash256(packed_generate_tx), self.current_work.value['merkle_link'])
-
getwork_time = time.time()
lp_count = self.new_work_event.times
- merkle_link = self.current_work.value['merkle_link']
+ merkle_link = bitcoin_data.calculate_merkle_link([bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in transactions], 0)
print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
bitcoin_data.target_to_difficulty(target),
len(self.current_work.value['transactions']),
)
- bits = self.current_work.value['bits']
- previous_block = self.current_work.value['previous_block']
ba = bitcoin_getwork.BlockAttempt(
- version=1,
+ version=min(self.current_work.value['version'], 2),
previous_block=self.current_work.value['previous_block'],
- merkle_root=merkle_root,
+ merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(bitcoin_data.tx_type.pack(transactions[0])), merkle_link),
timestamp=self.current_work.value['time'],
bits=self.current_work.value['bits'],
share_target=target,
log.err(None, 'Error while processing potential block:')
user, _, _, _ = self.get_user_details(request)
- assert header['merkle_root'] == merkle_root
- assert header['previous_block'] == previous_block
- assert header['bits'] == bits
+ assert header['previous_block'] == ba.previous_block
+ assert header['merkle_root'] == ba.merkle_root
+ assert header['bits'] == ba.bits
on_time = self.new_work_event.times == lp_count
log.err(None, 'Error while processing merged mining POW:')
if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
- min_header = dict(header);del min_header['merkle_root']
- hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.Share.gentx_before_refhash)
- share = p2pool_data.Share(self.net, None, dict(
- min_header=min_header, share_info=share_info, hash_link=hash_link,
- ref_merkle_link=dict(branch=[], index=0),
- ), merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
+ share = get_share(header, transactions)
print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
request.getUser(),