from __future__ import division
import hashlib
+import random
+import warnings
import p2pool
from p2pool.util import math, pack
def hash160(data):
    # RIPEMD-160 of SHA-256 — the standard bitcoin address hash.
    sha = hashlib.sha256(data).digest()
    ripemd = hashlib.new('ripemd160', sha).digest()
    return pack.IntType(160).unpack(ripemd)
def scrypt(data):
    # Litecoin-style scrypt proof-of-work hash. ltc_scrypt is imported
    # lazily so bitcoin-only deployments need not have it installed.
    pow_hash = __import__('ltc_scrypt').getPoWHash(data)
    return pack.IntType(256).unpack(pow_hash)

class ChecksummedType(pack.Type):
    # Wraps another pack.Type, appending a checksum of the inner
    # serialization on write and verifying it on read. The default
    # checksum is the first 4 bytes of double-SHA256 (bitcoin's
    # Base58Check address checksum).
    def __init__(self, inner, checksum_func=lambda data: hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]):
        self.inner = inner
        self.checksum_func = checksum_func
    
    def read(self, file):
        value, file = self.inner.read(file)
        packed = self.inner.pack(value)
        
        expected = self.checksum_func(packed)
        stored, file = pack.read(file, len(expected))
        if stored != expected:
            raise ValueError('invalid checksum')
        
        return value, file
    
    def write(self, file, item):
        packed = self.inner.pack(item)
        return (file, packed), self.checksum_func(packed)
class FloatingInteger(object):
__slots__ = ['bits', '_target']
tx_type = pack.ComposedType([
('version', pack.IntType(32)),
+ ('timestamp', pack.IntType(32)), # txn timestamp
('tx_ins', pack.ListType(pack.ComposedType([
('previous_output', pack.PossiblyNoneType(dict(hash=0, index=2**32 - 1), pack.ComposedType([
('hash', pack.IntType(256)),
('lock_time', pack.IntType(32)),
])
# A merkle link: the sibling hashes ('branch') plus the leaf position
# ('index') needed to recompute a merkle root from a single leaf hash.
# Field order defines the wire format — do not reorder.
merkle_link_type = pack.ComposedType([
    ('branch', pack.ListType(pack.IntType(256))),
    ('index', pack.IntType(32)),
])
# A transaction together with the merkle link proving its inclusion in
# the block identified by block_hash.
merkle_tx_type = pack.ComposedType([
    ('tx', tx_type),
    ('block_hash', pack.IntType(256)),
    ('merkle_link', merkle_link_type),
])
block_header_type = pack.ComposedType([
block_type = pack.ComposedType([
    ('header', block_header_type),
    ('txs', pack.ListType(tx_type)),
    ('signature', pack.VarStrType()), # block/header signature field
])
# merged mining
# Merged-mining auxiliary proof-of-work: the parent-chain coinbase tx,
# the link placing this chain's hash in the parent coinbase's aux tree,
# and the parent block header that was actually mined.
aux_pow_type = pack.ComposedType([
    ('merkle_tx', merkle_tx_type),
    ('merkle_link', merkle_link_type),
    ('parent_block_header', block_header_type),
])
return 0
hash_list = list(hashes)
while len(hash_list) > 1:
- hash_list = [hash256(merkle_record_type.pack(dict(left=left, right=left if right is None else right)))
- for left, right in zip(hash_list[::2], hash_list[1::2] + [None])]
+ hash_list = [hash256(merkle_record_type.pack(dict(left=left, right=right)))
+ for left, right in zip(hash_list[::2], hash_list[1::2] + [hash_list[::2][-1]])]
return hash_list[0]
-def calculate_merkle_branch(hashes, index):
+def calculate_merkle_link(hashes, index):
# XXX optimize this
- hash_list = [(h, i == index, []) for i, h in enumerate(hashes)]
+ hash_list = [(lambda _h=h: _h, i == index, []) for i, h in enumerate(hashes)]
while len(hash_list) > 1:
hash_list = [
(
- hash256(merkle_record_type.pack(dict(left=left, right=right))),
+ lambda _left=left, _right=right: hash256(merkle_record_type.pack(dict(left=_left(), right=_right()))),
left_f or right_f,
(left_l if left_f else right_l) + [dict(side=1, hash=right) if left_f else dict(side=0, hash=left)],
)
zip(hash_list[::2], hash_list[1::2] + [hash_list[::2][-1]])
]
- res = [x['hash'] for x in hash_list[0][2]]
+ res = [x['hash']() for x in hash_list[0][2]]
assert hash_list[0][1]
if p2pool.DEBUG:
- assert check_merkle_branch(hashes[index], index, res) == hash_list[0][0]
+ new_hashes = [random.randrange(2**256) if x is None else x
+ for x in hashes]
+ assert check_merkle_link(new_hashes[index], dict(branch=res, index=index)) == merkle_hash(new_hashes)
assert index == sum(k*2**i for i, k in enumerate([1-x['side'] for x in hash_list[0][2]]))
- return res
+ return dict(branch=res, index=index)
def check_merkle_link(tip_hash, link):
    # Recompute the merkle root implied by `tip_hash` sitting at
    # link['index'] with sibling hashes link['branch']. Bit i of the
    # index selects whether the node is the right (1) or left (0) child
    # at level i.
    if link['index'] >= 2**len(link['branch']):
        raise ValueError('index too large')
    h = tip_hash
    for i, sibling in enumerate(link['branch']):
        if (link['index'] >> i) & 1:
            h = hash256(merkle_record_type.pack(dict(left=sibling, right=h)))
        else:
            h = hash256(merkle_record_type.pack(dict(left=h, right=sibling)))
    return h
# targets

def target_to_average_attempts(target):
    # Expected number of hash attempts needed to meet `target`: hashes
    # are uniform over [0, 2**256), so P(hash <= target) = (target+1)/2**256.
    assert 0 <= target and isinstance(target, (int, long)), target
    if target >= 2**256:
        warnings.warn('target >= 2**256!')
    return 2**256 // (target + 1)
def average_attempts_to_target(average_attempts):
    # Inverse of target_to_average_attempts, rounded to nearest and
    # clamped to the largest representable 256-bit target.
    assert average_attempts > 0
    target = int(2**256/average_attempts - 1 + 0.5)
    return min(target, 2**256 - 1)

def target_to_difficulty(target):
    # Difficulty relative to the maximum ("difficulty 1") bitcoin
    # target, 0xffff0000 * 2**(256-64).
    assert 0 <= target and isinstance(target, (int, long)), target
    if target >= 2**256:
        warnings.warn('target >= 2**256!')
    return (0xffff0000 * 2**(256-64) + 1) / (target + 1)
def difficulty_to_target(difficulty):
    # Inverse of target_to_difficulty; a difficulty of 0 maps to the
    # largest possible target, and results are clamped to 256 bits.
    assert difficulty >= 0
    if difficulty == 0:
        return 2**256 - 1
    target = int((0xffff0000 * 2**(256-64) + 1)/difficulty - 1 + 0.5)
    return min(target, 2**256 - 1)
# human addresses
raise ValueError('address not for this net!')
return x['pubkey_hash']
def address_to_script(address, net):
    # Convert a base58 human-readable address into the standard
    # pay-to-pubkey-hash output script:
    #   OP_DUP OP_HASH160 <push 20-byte pubkey hash> OP_EQUALVERIFY OP_CHECKSIG
    #
    # Raises ValueError if the address's version byte does not match
    # net.ADDRESS_VERSION.
    x = human_address_type.unpack(base58_decode(address))
    # (removed stray debug print of x['pubkey_hash'])
    if x['version'] != net.ADDRESS_VERSION:
        raise ValueError('address not for this net!')
    return '\x76\xa9' + ('\x14' + pack.IntType(160).pack(x['pubkey_hash'])) + '\x88\xac'


# transactions
def pubkey_to_script2(pubkey):