raise EarlyEnd()
return data2, (data, pos + length)
+def size((data, pos)):
+ # Number of bytes left to read in the (buffer, offset) reader state.
+ return len(data) - pos
+
class Type(object):
# the same data can have only one unpacked representation, but multiple packed binary representations
def target_to_average_attempts(target):
    # Expected number of hash attempts to meet `target`: a 256-bit hash is
    # uniform over 2**256 values, of which target + 1 are <= target.
    return 2**256//(target + 1)
+# tx
+
+def tx_get_sigop_count(tx):
+ # Total signature-operation count of a transaction: the sigops found in
+ # every input script plus those found in every output script.
+ return sum(script.get_sigop_count(txin['script']) for txin in tx['tx_ins']) + sum(script.get_sigop_count(txout['script']) for txout in tx['tx_outs'])
+
# human addresses
human_address_type = ChecksummedType(ComposedType([
--- /dev/null
+from p2pool.bitcoin import data as bitcoin_data
+from p2pool.util import bases
+
+def reads_nothing(f):
+ # Argument reader for opcodes that carry no payload: consumes no bytes
+ # and yields an empty argument, leaving the reader state unchanged.
+ return '', f
+def protoPUSH(length):
+ # Build an argument reader for the direct-push opcodes (1..75): it reads
+ # exactly `length` bytes of pushed data from the reader state.
+ return lambda f: bitcoin_data.read(f, length)
+def protoPUSHDATA(size_len):
+ # Build an argument reader for OP_PUSHDATA1/2/4: reads a `size_len`-byte
+ # length field, then that many bytes of pushed data.
+ def _(f):
+ length_str, f = bitcoin_data.read(f, size_len)
+ length = bases.string_to_natural(length_str[::-1]) # [::-1]: length field is little-endian on the wire
+ data, f = bitcoin_data.read(f, length)
+ return data, f
+ return _
+
+# Map: opcode byte value -> (human-readable name, argument reader function).
+opcodes = {}
+# Default every byte value to an argument-less "unknown" entry, then
+# override the values we care about below.
+for i in xrange(256):
+ opcodes[i] = "UNK_" + str(i), reads_nothing
+
+opcodes[0] = '0', reads_nothing
+# Opcodes 1..75 push that many literal bytes onto the stack.
+for i in xrange(1, 76):
+ opcodes[i] = 'PUSH%i' % i, protoPUSH(i)
+opcodes[76] = 'PUSHDATA1', protoPUSHDATA(1)
+opcodes[77] = 'PUSHDATA2', protoPUSHDATA(2)
+opcodes[78] = 'PUSHDATA4', protoPUSHDATA(4)
+opcodes[79] = '-1', reads_nothing
+# Opcodes 81..96 push the small number (opcode - 80), i.e. 1..16.
+for i in xrange(81, 97):
+ opcodes[i] = str(i - 80), reads_nothing
+
+# Signature-checking opcodes; these are what get_sigop_count tallies.
+opcodes[172] = 'CHECKSIG', reads_nothing
+opcodes[173] = 'CHECKSIGVERIFY', reads_nothing
+opcodes[174] = 'CHECKMULTISIG', reads_nothing
+opcodes[175] = 'CHECKMULTISIGVERIFY', reads_nothing
+
+def parse(script):
+ # Generator: decode `script` (a byte string) into (opcode_name,
+ # opcode_argument) pairs using the `opcodes` table. Every byte value has
+ # a table entry, so lookup cannot fail; a push whose data runs past the
+ # end of the script presumably raises from bitcoin_data.read — confirm.
+ f = script, 0 # reader state: (data, position)
+ while bitcoin_data.size(f):
+ opcode_str, f = bitcoin_data.read(f, 1)
+ opcode = ord(opcode_str)
+ opcode_name, read_func = opcodes[opcode]
+ opcode_arg, f = read_func(f)
+ yield opcode_name, opcode_arg
+
+def get_sigop_count(script):
+ # Count the signature operations in a script. CHECKSIG variants count
+ # as 1; CHECKMULTISIG variants count as 20 — NOTE(review): matches
+ # Bitcoin's legacy fixed multisig sigop accounting, confirm against the
+ # reference client.
+ weights = {
+ 'CHECKSIG': 1,
+ 'CHECKSIGVERIFY': 1,
+ 'CHECKMULTISIG': 20,
+ 'CHECKMULTISIGVERIFY': 20,
+ }
+ return sum(weights.get(opcode_name, 0) for opcode_name, opcode_arg in parse(script))
+
+if __name__ == '__main__':
+ # Smoke test: decode a pay-to-pubkey-hash-shaped script (0x76 0xA9
+ # PUSH20 <hash> 0x88 0xAC) and print its opcodes, then its sigop count
+ # (expected 1, from the trailing CHECKSIG byte 0xAC == 172).
+ script = '76 A9 14 89 AB CD EF AB BA AB BA AB BA AB BA AB BA AB BA AB BA 88 AC'.replace(' ', '').decode('hex')
+ for l in parse(script):
+ print l
+ print get_sigop_count(script)
from twisted.python import log
from p2pool.bitcoin import data as bitcoin_data
+from p2pool.bitcoin import script
from p2pool.util import memoize, expiring_dict, math, skiplist
class CompressedList(bitcoin_data.Type):
print 'targ', hex(self.target2)
raise ValueError('not enough work!')
+ if script.get_sigop_count(self.new_script) > 1:
+ raise ValueError('too many sigops!')
self.time_seen = time.time()
self.shared = False
if self.other_txs is not None:
if bitcoin_data.merkle_hash([gentx] + self.other_txs) != self.header['merkle_root']:
raise ValueError('''gentx doesn't match header via other_txs''')
+
+ if len(bitcoin_data.block_type.pack(dict(header=self.header, txs=[gentx] + self.other_txs))) > 1000000 - 1000:
+ raise ValueError('''block size too large''')
self.gentx = gentx
amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy*1//200
amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy - sum(amounts.itervalues()) # collect any extra
- dests = sorted(amounts.iterkeys(), key=lambda script: (script == new_script, script))
+ pre_dests = sorted(amounts.iterkeys(), key=lambda script: (amounts[script], script))
+ pre_dests = pre_dests[-4000:] # block length limit, unlikely to ever be hit
+
+ dests = sorted(pre_dests, key=lambda script: (script == new_script, script))
assert dests[-1] == new_script
return dict(
# for each overall head, attempt verification
# if it fails, attempt on parent, and repeat
# if no successful verification because of lack of parents, request parent
- for head in self.heads:
+ bads = set()
+ for head in set(self.heads) - set(self.verified.heads):
head_height, last = self.get_height_and_last(head)
for share in itertools.islice(self.get_chain_known(head), None if last is None else max(0, head_height - self.net.CHAIN_LENGTH)):
if self.attempt_verify(share, now):
break
+ if share.hash in self.heads:
+ bads.add(share.hash)
else:
if last is not None:
desired.add((self.shares[random.choice(list(self.reverse_shares[last]))].peer, last))
+ for bad in bads:
+ assert bad not in self.verified.shares
+ assert bad in self.heads
+ self.remove(bad)
# try to get at least CHAIN_LENGTH height for each verified head, requesting parents if needed
for head in list(self.verified.heads):
merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
def compute(state, all_targets):
- extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
- # XXX limit to merkle_branch and block max size - 1000000 byte
- # and sigops
+ pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
+ pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
+ extra_txs = []
+ size = 0
+ for tx in pre_extra_txs:
+ this_size = bitcoin_data.tx_type.pack(tx)
+ if size + this_size > 500000:
+ break
+ extra_txs.append(tx)
+ size += this_size
+ # XXX check sigops!
+ # XXX assuming generate_tx is smallish here..
generate_tx = p2pool.generate_transaction(
tracker=tracker,
previous_share_hash=state['best_share_hash'],
net=args.net,
)
print 'Generating!', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target2']//1000000
- print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target2'],)
+ #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target2'],)
#, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
transactions = [generate_tx] + [tx.tx for tx in extra_txs]
merkle_root = bitcoin.data.merkle_hash(transactions)
reactor.stop()
def run():
+ if __debug__:
+ defer.setDebugging(True)
+
parser = argparse.ArgumentParser(description='p2pool (version %s)' % (__version__,))
parser.add_argument('--version', action='version', version=__version__)
parser.add_argument('--testnet',