from __future__ import division

import hashlib
import random
import warnings

import p2pool
from p2pool.util import math, pack
def hash160(data):
    # Standard Bitcoin address hash: RIPEMD-160 over SHA-256 of the data,
    # returned as an integer via the project's 160-bit unpacker.
    inner = hashlib.sha256(data).digest()
    outer = hashlib.new('ripemd160', inner).digest()
    return pack.IntType(160).unpack(outer)
def scrypt(data):
    # scrypt proof-of-work hash of data as a 256-bit integer.
    # ltc_scrypt is imported lazily so this module still loads for
    # networks that never call scrypt() and lack the extension.
    return pack.IntType(256).unpack(__import__('ltc_scrypt').getPoWHash(data))
class ChecksummedType(pack.Type):
    # Wraps an inner type, appending a checksum of the packed bytes.
    # checksum_func defaults to the Bitcoin convention: first 4 bytes
    # of double-SHA256.
    def __init__(self, inner, checksum_func=lambda data: hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]):
        self.inner = inner
        # Bug fix: the passed checksum_func was accepted but never stored,
        # so a custom checksum could not take effect. Store it for use by
        # the read/write methods (not visible in this chunk).
        self.checksum_func = checksum_func
tx_type = pack.ComposedType([
('version', pack.IntType(32)),
+ ('timestamp', pack.IntType(32)), # txn timestamp
('tx_ins', pack.ListType(pack.ComposedType([
('previous_output', pack.PossiblyNoneType(dict(hash=0, index=2**32 - 1), pack.ComposedType([
('hash', pack.IntType(256)),
block_type = pack.ComposedType([
    ('header', block_header_type),
    ('txs', pack.ListType(tx_type)),
    ('signature', pack.VarStrType()),  # block signature appended after the txs (fork-specific field)
])
# merged mining
def calculate_merkle_link(hashes, index):
    # Build the merkle branch proving hashes[index] belongs to the tree.
    # Entries of hashes may be None (unknown); each hash is wrapped in a
    # zero-argument thunk so internal-node hashing is deferred and only
    # the nodes actually needed for the branch are ever evaluated.
    # XXX optimize this
    hash_list = [(lambda _h=h: _h, i == index, []) for i, h in enumerate(hashes)]
    while len(hash_list) > 1:
        hash_list = [
            (
                # bind _left/_right as defaults to dodge the late-binding-closure trap
                lambda _left=left, _right=right: hash256(merkle_record_type.pack(dict(left=_left(), right=_right()))),
                left_f or right_f,
                (left_l if left_f else right_l) + [dict(side=1, hash=right) if left_f else dict(side=0, hash=left)],
            )
            # odd-length levels pair the last element with itself, per Bitcoin's merkle rule
            for (left, left_f, left_l), (right, right_f, right_l) in
                zip(hash_list[::2], hash_list[1::2] + [hash_list[::2][-1]])
        ]
    res = [x['hash']() for x in hash_list[0][2]]  # force only the branch thunks
    assert hash_list[0][1]
    if p2pool.DEBUG:
        # substitute random values for unknown (None) hashes so the
        # link can be checked against a full merkle_hash computation
        new_hashes = [random.randrange(2**256) if x is None else x
            for x in hashes]
        assert check_merkle_link(new_hashes[index], dict(branch=res, index=index)) == merkle_hash(new_hashes)
        assert index == sum(k*2**i for i, k in enumerate([1-x['side'] for x in hash_list[0][2]]))
    return dict(branch=res, index=index)
# targets
def target_to_average_attempts(target):
    # Expected number of hash attempts needed to meet target
    # (lower target = harder). Warns rather than fails on an
    # out-of-range target so callers keep working on bad data.
    assert 0 <= target and isinstance(target, (int, long)), target
    if target >= 2**256: warnings.warn('target >= 2**256!')
    return 2**256//(target + 1)
def average_attempts_to_target(average_attempts):
    # Inverse of target_to_average_attempts, rounded to nearest and
    # clamped to the maximum representable 256-bit target.
    assert average_attempts > 0
    return min(int(2**256/average_attempts - 1 + 0.5), 2**256-1)
def target_to_difficulty(target):
    # Difficulty relative to the conventional difficulty-1 target
    # (0xffff * 2**208). Warns rather than fails on an out-of-range
    # target so callers keep working on bad data.
    assert 0 <= target and isinstance(target, (int, long)), target
    if target >= 2**256: warnings.warn('target >= 2**256!')
    return (0xffff0000 * 2**(256-64) + 1)/(target + 1)
def difficulty_to_target(difficulty):
    # Inverse of target_to_difficulty, rounded to nearest and clamped
    # to the maximum 256-bit target. difficulty == 0 maps to the
    # easiest possible target.
    assert difficulty >= 0
    if difficulty == 0: return 2**256-1
    return min(int((0xffff0000 * 2**(256-64) + 1)/difficulty - 1 + 0.5), 2**256-1)
# human addresses
raise ValueError('address not for this net!')
return x['pubkey_hash']
def address_to_script(address, net):
    # Convert a base58 human address into the standard
    # pay-to-pubkey-hash output script for this network.
    # Raises ValueError if the address version doesn't match the net.
    x = human_address_type.unpack(base58_decode(address))
    if x['version'] != net.ADDRESS_VERSION:
        raise ValueError('address not for this net!')
    # Removed leftover debug print of the pubkey hash.
    # OP_DUP OP_HASH160 <push-20> <pubkey_hash> OP_EQUALVERIFY OP_CHECKSIG
    return '\x76\xa9' + ('\x14' + pack.IntType(160).pack(x['pubkey_hash'])) + '\x88\xac'
# transactions
def pubkey_to_script2(pubkey):