From: forrest Date: Sat, 9 Jul 2011 17:57:17 +0000 (+0000) Subject: working ... sorry for bad log messages\! X-Git-Tag: 0.8.2~399 X-Git-Url: https://git.novaco.in/?a=commitdiff_plain;h=4cf120492b2750b2e81bade672b52393005a0fee;p=p2pool.git working ... sorry for bad log messages\! git-svn-id: svn://forre.st/p2pool@1369 470744a7-cac9-478e-843e-5ec1b25c69e8 --- diff --git a/p2pool/bitcoin/data.py b/p2pool/bitcoin/data.py index 59a7f8d..5d302af 100644 --- a/p2pool/bitcoin/data.py +++ b/p2pool/bitcoin/data.py @@ -3,9 +3,10 @@ from __future__ import division import struct import StringIO import hashlib +import warnings from . import base58 -from p2pool.util import bases +from p2pool.util import bases, expiring_dict, math class EarlyEnd(Exception): pass @@ -16,6 +17,14 @@ class LateEnd(Exception): class Type(object): # the same data can have only one unpacked representation, but multiple packed binary representations + #def __hash__(self): + # return hash(tuple(self.__dict__.items())) + + #def __eq__(self, other): + # if not isinstance(other, Type): + # raise NotImplementedError() + # return self.__dict__ == other.__dict__ + def _unpack(self, data): f = StringIO.StringIO(data) @@ -28,7 +37,11 @@ class Type(object): def unpack(self, data): obj = self._unpack(data) - assert self._unpack(self._pack(obj)) == obj + + data2 = self._pack(obj) + if data2 != data: + assert self._unpack(data2) == obj + return obj def _pack(self, obj): @@ -43,6 +56,7 @@ class Type(object): def pack(self, obj): data = self._pack(obj) assert self._unpack(data) == obj + return data @@ -54,10 +68,10 @@ class Type(object): def hash160(self, obj): - return ripemdsha(self.pack(obj)) + return ShortHashType().unpack(hashlib.new('ripemd160', hashlib.sha256(self.pack(obj)).digest()).digest()) def hash256(self, obj): - return doublesha(self.pack(obj)) + return HashType().unpack(hashlib.sha256(hashlib.sha256(self.pack(obj)).digest()).digest()) class VarIntType(Type): def read(self, file): @@ -141,6 +155,8 @@ class HashType(Type): def write(self, file, item): if item >= 2**256: raise ValueError("invalid hash value") + if item != 0 and item < 2**160: + warnings.warn("very low hash value - maybe you meant to use ShortHashType?") file.write(('%064x' % (item,)).decode('hex')[::-1]) class ShortHashType(Type): @@ -194,13 +210,9 @@ class IPV6AddressType(Type): raise EarlyEnd() if data[:12] != '00000000000000000000ffff'.decode('hex'): raise ValueError("ipv6 addresses not supported yet") - return '::ffff:' + '.'.join(str(ord(x)) for x in data[12:]) + return '.'.join(str(ord(x)) for x in data[12:]) def write(self, file, item): - prefix = '::ffff:' - if not item.startswith(prefix): - raise ValueError("ipv6 addresses not supported yet") - item = item[len(prefix):] bits = map(int, item.split('.')) if len(bits) != 4: raise ValueError("invalid address: %r" % (bits,)) @@ -240,6 +252,64 @@ class ChecksummedType(Type): file.write(data) file.write(hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]) +class FloatingIntegerType(Type): + def read(self, file): + data = FixedStrType(4).read(file) + target = self._bits_to_target(data) + if self._target_to_bits(target) != data: + raise ValueError("bits in non-canonical form") + return target + + def write(self, file, item): + FixedStrType(4).write(file, self._target_to_bits(item)) + + def truncate_to(self, x): + return self._bits_to_target(self._target_to_bits(x, _check=False)) + + def _bits_to_target(self, bits, _check=True): + assert len(bits) == 4, repr(bits) + target1 = self._bits_to_target1(bits) + 
target2 = self._bits_to_target2(bits) + if target1 != target2: + raise ValueError() + if _check: + if self._target_to_bits(target1, _check=False) != bits: + raise ValueError() + return target1 + + def _bits_to_target1(self, bits): + bits = bits[::-1] + length = ord(bits[0]) + return bases.string_to_natural((bits[1:] + "\0"*length)[:length]) + + def _bits_to_target2(self, bits): + bits = struct.unpack("> 24) - 3)) + + def _target_to_bits(self, target, _check=True): + n = bases.natural_to_string(target) + if n and ord(n[0]) >= 128: + n = "\x00" + n + bits = (chr(len(n)) + (n + 3*chr(0))[:3])[::-1] + if _check: + if self._bits_to_target(bits, _check=False) != target: + raise ValueError(repr((target, self._bits_to_target(bits, _check=False)))) + return bits + +class PossiblyNone(Type): + def __init__(self, none_value, inner): + self.none_value = none_value + self.inner = inner + + def read(self, file): + value = self.inner.read(file) + return None if value == self.none_value else value + + def write(self, file, item): + if item == self.none_value: + raise ValueError("none_value used") + self.inner.write(file, self.none_value if item is None else item) + address_type = ComposedType([ ('services', StructType(' 1: - hash_list = [doublesha(merkle_record_type.pack(dict(left=left, right=left if right is None else right))) + hash_list = [merkle_record_type.hash256(dict(left=left, right=left if right is None else right)) for left, right in zip(hash_list[::2], hash_list[1::2] + [None])] return hash_list[0] -def tx_hash(tx): - return doublesha(tx_type.pack(tx)) - -def block_hash(header): - return doublesha(block_header_type.pack(header)) - -def shift_left(n, m): - # python: :( - if m < 0: - return n >> -m - return n << m - -def bits_to_target(bits): - bits = bits[::-1] - length = ord(bits[0]) - return bases.string_to_natural((bits[1:] + "\0"*length)[:length]) - -def old_bits_to_target(bits): - return shift_left(bits & 0x00ffffff, 8 * ((bits >> 24) - 3)) - -def about_equal(a, b): - if a == b: return True - return abs(a-b)/((abs(a)+abs(b))/2) < .01 - -def compress_target_to_bits(target): # loses precision - print - print "t", target - n = bases.natural_to_string(target) - print "n", n.encode('hex') - bits = chr(len(n)) + n[:3].ljust(3, '\0') - bits = bits[::-1] - print "bits", bits.encode('hex') - print "new", bits_to_target(bits) - print "old", old_bits_to_target(struct.unpack("' % (' '.join('%s=%s' % (k, hex(v))) for k, v in self.__dict__.iteritems()) @@ -33,66 +37,73 @@ class BlockAttempt(object): def __repr__(self): return 'BlockAttempt(%s)' % (', '.join('%s=%r' % (k, v) for k, v in self.__dict__.iteritems()),) - def getwork(self, target_multiplier=1, _check=2): - target = bitcoin_data.bits_to_target(self.bits) * target_multiplier - if target >= 2**256//2**32: + def getwork(self, target=None, _check=3): + target2 = self.target if target is None else target + if target2 >= 2**256//2**32: raise ValueError("target higher than standard maximum") - previous_block2 = _reverse_chunks('%064x' % self.previous_block, 8).decode('hex') - merkle_root2 = _reverse_chunks('%064x' % self.merkle_root, 8).decode('hex') - data = struct.pack('>I32s32sIII', self.version, previous_block2, merkle_root2, self.timestamp, self.bits, 0).encode('hex') + '000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000' - - previous_block3 = ('%064x' % self.previous_block).decode('hex')[::-1] - merkle_root3 = ('%064x' % self.merkle_root).decode('hex')[::-1] - data2 = struct.pack('I32s32sIII', 
data[:160].decode('hex')) - previous_block = int(_reverse_chunks(previous_block.encode('hex'), 8), 16) - merkle_root = int(_reverse_chunks(merkle_root.encode('hex'), 8), 16) - return dict(version=version, previous_block=previous_block, merkle_root=merkle_root, timestamp=timestamp, bits=bits, nonce=nonce) + return bitcoin_data.block_header_type.unpack(_swap(data.decode('hex'), 4)[:80]) if __name__ == '__main__': + BlockAttempt.from_getwork({ + 'target': '0000000000000000000000000000000000000000000000f2b944000000000000', + 'midstate': '5982f893102dec03e374b472647c4f19b1b6d21ae4b2ac624f3d2f41b9719404', + 'hash1': '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000', + 'data': '0000000163930d52a5ffca79b29b95a659a302cd4e1654194780499000002274000000002e133d9e51f45bc0886d05252038e421e82bff18b67dc14b90d9c3c2f422cd5c4dd4598e1a44b9f200000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000' +}, _check=100) + BlockAttempt.from_getwork({ + "midstate" : "f4a9b048c0cb9791bc94b13ee0eec21e713963d524fd140b58bb754dd7b0955f", + "data" : "000000019a1d7342fb62090bda686b22d90f9f73d0f5c418b9c980cd0000011a00000000680b07c8a2f97ecd831f951806857e09f98a3b81cdef1fa71982934fef8dc3444e18585d1a0abbcf00000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000", + "hash1" : "00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000", + "target" : "0000000000000000000000000000000000000000000000cfbb0a000000000000" + }) ba = BlockAttempt( 1, - 0x000000000000148135e10208db85abb62754341a392eab1f186aab077a831cf7, + 0x148135e10208db85abb62754341a392eab1f186aab077a831cf7, 0x534ea08be1ab529f484369344b6d5423ef5a0767db9b3ebb4e182bbb67962520, 1305759879, - 440711666, + 0x44b9f20000000000000000000000000000000000000000000000, ) + ba.getwork(2**192*5, 100) ba.getwork(1, 100) ba.getwork(10, 100) - ba.from_getwork({ - 'target': '0000000000000000000000000000000000000000000000f2b944000000000000', - 'midstate': '5982f893102dec03e374b472647c4f19b1b6d21ae4b2ac624f3d2f41b9719404', - 'hash1': '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000', - 'data': '0000000163930d52a5ffca79b29b95a659a302cd4e1654194780499000002274000000002e133d9e51f45bc0886d05252038e421e82bff18b67dc14b90d9c3c2f422cd5c4dd4598e1a44b9f200000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000' -}, _check=100) + ba.getwork() + ba.getwork(_check=100) diff --git a/p2pool/bitcoin/p2p.py b/p2pool/bitcoin/p2p.py index f68c984..aba9532 100644 --- a/p2pool/bitcoin/p2p.py +++ b/p2pool/bitcoin/p2p.py @@ -102,73 +102,6 @@ class Protocol(BaseProtocol): def use_checksum(self): return self.version >= 209 - message_version = bitcoin_data.ComposedType([ - ('version', bitcoin_data.StructType(' 1: hash_list = [ ( - bitcoin_data.doublesha(bitcoin_data.merkle_record_type.pack(dict(left=left, right=right))), + bitcoin_data.merkle_record_type.hash256(dict(left=left, right=right)), left_f or right_f, (left_l if left_f else right_l) + [dict(side=1, hash=right) if left_f else dict(side=0, hash=left)], ) @@ -52,57 +82,60 @@ def calculate_merkle_branch(txs, index): return hash_list[0][2] def check_merkle_branch(tx, branch): - hash_ = bitcoin_data.tx_hash(tx) + hash_ = bitcoin_data.tx_type.hash256(tx) for step in branch: if not step['side']: - 
hash_ = bitcoin_data.doublesha(bitcoin_data.merkle_record_type.pack(dict(left=step['hash'], right=hash_))) + hash_ = bitcoin_data.merkle_record_type.hash256(dict(left=step['hash'], right=hash_)) else: - hash_ = bitcoin_data.doublesha(bitcoin_data.merkle_record_type.pack(dict(left=hash_, right=step['hash']))) + hash_ = bitcoin_data.merkle_record_type.hash256(dict(left=hash_, right=step['hash'])) return hash_ -def txs_to_gentx_info(txs): +def gentx_to_share_info(gentx): return dict( - share_info=dict( - share_data=coinbase_type.unpack(txs[0]['tx_ins'][0]['script'])['share_data'], - subsidy=sum(tx_out['value'] for tx_out in txs[0]['tx_outs']), - new_script=txs[0]['tx_outs'][-1]['script'], - ), - merkle_branch=calculate_merkle_branch(txs, 0), + share_data=coinbase_type.unpack(gentx['tx_ins'][0]['script'])['share_data'], + subsidy=sum(tx_out['value'] for tx_out in gentx['tx_outs']), + new_script=gentx['tx_outs'][-1]['script'], ) -def share_info_to_gentx_and_shares(share_info, chain, net): +def share_info_to_gentx(share_info, chain, net): return generate_transaction( - previous_share2=chain.share2s[share_info['share_data']['previous_p2pool_share_hash']], + previous_share2=chain.share2s[share_info['share_data']['previous_share_hash']], nonce=share_info['share_data']['nonce'], new_script=share_info['new_script'], subsidy=share_info['subsidy'], net=net, ) -def gentx_info_to_gentx_shares_and_merkle_root(gentx_info, chain, net): - gentx, shares = share_info_to_gentx_and_shares(gentx_info['share_info'], chain, net) - return gentx, shares, check_merkle_branch(gentx, gentx_info['merkle_branch']) - class Share(object): - def __init__(self, header, txs=None, gentx_info=None): - if txs is not None: - if bitcoin_data.merkle_hash(txs) != header['merkle_root']: - raise ValueError("txs don't match header") + def __init__(self, header, share_info, merkle_branch=None, other_txs=None): + if merkle_branch is None and other_txs is None: + raise ValueError('need either merkle_branch or other_txs') + self.header = header + self.share_info = share_info + self.merkle_branch = merkle_branch + self.other_txs = other_txs - if gentx_info is None: - if txs is None: - raise ValueError('need either txs or gentx_info') - - gentx_info = txs_to_gentx_info(txs) + self.share_data = self.share_info['share_data'] + self.new_script = self.share_info['new_script'] + self.subsidy = self.share_info['subsidy'] - coinbase = gentx_info['share_info']['coinbase'] + self.previous_share_hash = self.share_data['previous_share_hash'] + self.previous_shares_hash = self.share_data['previous_shares_hash'] + self.target2 = self.share_data['target2'] - self.header = header - self.txs = txs - self.gentx_info = gentx_info - self.hash = bitcoin_data.block_hash(header) - self.previous_share_hash = coinbase['previous_p2pool_share_hash'] if coinbase['previous_p2pool_share_hash'] != 2**256 - 1 else None - self.chain_id_data = chain_id_type.pack(dict(last_p2pool_block_hash=coinbase['last_p2pool_block_hash'], bits=header['bits'])) + self.hash = bitcoin_data.block_header_type.hash256(header) + + @classmethod + def from_block(cls, block): + return cls(block['header'], gentx_to_share_info(block['txs'][0]), other_txs=block['txs'][1:]) + + @classmethod + def from_share1a(cls, share1a): + return cls(**share1a) + + @classmethod + def from_share1b(cls, share1b): + return cls(**share1b) def as_block(self): if self.txs is None: @@ -140,89 +173,149 @@ class Share2(object): def flag_shared(self): self.shared = True -def generate_transaction(last_p2pool_block_hash, 
previous_share2, new_script, subsidy, nonce, net): - shares = (previous_share2.shares if previous_share2 is not None else [net.SCRIPT]*net.SPREAD)[1:-1] + [new_script, new_script] +def generate_transaction(tracker, previous_share_hash, new_script, subsidy, nonce, block_target, net): + previous_share2 = tracker.shares[previous_share_hash] if previous_share_hash is not None else None + #previous_share2 = chain.shares + #previous_shares + #shares = + #shares = (previous_share2.shares if previous_share2 is not None else [net.SCRIPT]*net.SPREAD)[1:-1] + [new_script, new_script] + + chain = list(itertools.islice(tracker.get_chain(previous_share_hash), net.CHAIN_LENGTH)) + if len(chain) < 100: + target2 = bitcoin_data.FloatingIntegerType().truncate_to(2**256//2**32 - 1) + else: + attempts_per_second = sum(bitcoin_data.target_to_average_attempts(share.target) for share in itertools.islice(chain, 0, max(0, len(chain) - 1)))//(chain[0].timestamp - chain[-1].timestamp) + pre_target = 2**256*net.SHARE_PERIOD//attempts_per_second + pre_target2 = math.clip(pre_target, (previous_share2.target*9//10, previous_share2.target*11//10)) + pre_target3 = math.clip(pre_target2, (0, 2**256//2**32 - 1)) + target2 = bitcoin_data.FloatingIntegerType().truncate_to(pre_target3) + + + attempts_to_block = bitcoin_data.target_to_average_attempts(block_target) + total_weight = 0 + + class fake_share(object): + script = new_script + share = dict(target=target2) dest_weights = {} - for script in shares: - dest_weights[script] = dest_weights.get(script, 0) + 1 - total_weight = sum(dest_weights.itervalues()) + for share in itertools.chain([fake_share], itertools.islice(tracker.get_chain(previous_share_hash), net.CHAIN_LENGTH)): + weight = bitcoin_data.target_to_average_attempts(share.share['target']) + weight = max(weight, attempts_to_block - total_weight) + + dest_weights[share.script] = dest_weights.get(share.script, 0) + weight + total_weight += weight + + if total_weight == attempts_to_block: + break - amounts = dict((script, subsidy*weight*63//(64*total_weight)) for (script, weight) in dest_weights.iteritems()) - amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy//64 # prevent fake previous p2pool blocks + amounts = dict((script, subsidy*(199*weight)//(200*total_weight)) for (script, weight) in dest_weights.iteritems()) + amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy*1//200 # prevent fake previous p2pool blocks amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy - sum(amounts.itervalues()) # collect any extra dests = sorted(amounts.iterkeys(), key=lambda script: (script == new_script, script)) - assert dests[-1] == new_script + assert dests[-1] == new_script, dests - pre_target = sum(bitcoin_data.target_to_average_attempts(share(x ago).target) for x in xrange(1000))/(share(1000 ago).timestamp - share(1 ago).timestamp) - bits2 = bitcoin_data.compress_target_to_bits(pre_target) + previous_shares = [] # XXX return dict( version=1, tx_ins=[dict( - previous_output=dict(index=4294967295, hash=0), - sequence=4294967295, + previous_output=None, + sequence=None, script=coinbase_type.pack(dict( identifier=net.IDENTIFIER, share_data=dict( - last_p2pool_block_hash=last_p2pool_block_hash, - previous_p2pool_share_hash=previous_share2.share.hash if previous_share2 is not None else 2**256 - 1, + previous_share_hash=previous_share_hash, + previous_shares_hash=shares_type.hash256(previous_shares), nonce=nonce, - bits2=bits2, + target2=target2, ), )), )], tx_outs=[dict(value=amounts[script], script=script) for 
script in dests if amounts[script]], lock_time=0, - ), shares + ) class Tracker(object): def __init__(self): self.shares = {} # hash -> share - self.reverse_shares = {} # previous_hash -> share_hash + self.reverse_shares = {} # previous_share_hash -> share_hash self.heads = {} # hash -> (height, tail hash) self.heads = set() def add_share(self, share): if share.hash in self.shares: - return # XXX + return # XXX raise exception? self.shares[share.hash] = share - self.reverse_shares.setdefault(share.previous_hash, set()).add(share.hash) + self.reverse_shares.setdefault(share.previous_share_hash, set()).add(share.hash) if self.reverse_shares.get(share.hash, set()): pass # not a head else: self.heads.add(share.hash) - if share.previous_hash in self.heads: - self.heads.remove(share.previous_hash) + if share.previous_share_hash in self.heads: + self.heads.remove(share.previous_share_hash) def get_chain(self, start): share_hash_to_get = start while share_hash_to_get in self.shares: share = self.shares[share_hash_to_get] yield share - share_hash_to_get = share.previous_hash + share_hash_to_get = share.previous_share_hash - def best(self): + def get_best_share_hash(self): + if not self.heads: + return None return max(self.heads, key=self.score_chain) def score_chain(self, start): length = len(self.get_chain(start)) score = 0 - for share in itertools.islice(self.get_chain(start), 1000): + for share in itertools.islice(self.get_chain(start), self.net.CHAIN_LENGTH): score += a return (min(length, 1000), score) +class OkayTracker(Tracker): + def __init__(self): + Tracker.__init__(self) + self.okay_cache = set() + def is_okay(self, start): + ''' + Returns: + {'result': 'okay', verified_height: ...} # if share has an okay parent or if share has CHAIN_LENGTH children and CHAIN_LENTH parents that it verified with + {'result': 'needs_parent', 'parent_hash': ...} # if share doesn't have CHAIN_LENGTH parents + {'result': 'needs_share_shares', 'share_hash': ...} # if share has CHAIN_LENGTH children and needs its shares to + {'result': 'not_okay'} # if the share has a not okay parent or if the share has an okay parent and failed validation + ''' + + length = len + to_end_rev = [] + for share in itertools.islice(self.get_chain(start), self.net.CHAIN_LENGTH): + if share in self.okay_cache: + return validate(share, to_end_rev[::-1]) + to_end_rev.append(share) + # picking up last share from for loop, ew + self.okay_cache.add(share) + return validate(share, to_end_rev[::-1]) +class Chain(object): + def __init__(self): + pass + +def get_chain_descriptor(tracker, start): + for item in tracker.get_chain(self.net.CHAIN_LENGTH): + a + pass + if __name__ == '__main__': class FakeShare(object): - def __init__(self, hash, previous_hash): + def __init__(self, hash, previous_share_hash): self.hash = hash - self.previous_hash = previous_hash + self.previous_share_hash = previous_share_hash t = Tracker() @@ -233,10 +326,10 @@ if __name__ == '__main__': t.add_share(FakeShare(3, 4)) print t.heads -# TARGET_MULTIPLIER needs to be less than the current difficulty to prevent miner clients from missing shares - class Mainnet(bitcoin_data.Mainnet): - TARGET_MULTIPLIER = SPREAD = 600 + SHARE_PERIOD = 5 # seconds + CHAIN_LENGTH = 1000 # shares + SPREAD = 10 # blocks ROOT_BLOCK = 0x6c9cb0589a44808d9a9361266a4ffb9fea2e2cf4d70bb2118b5 SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex') IDENTIFIER = 0x7452839666e1f8f8 @@ -245,7 +338,9 
@@ class Mainnet(bitcoin_data.Mainnet): P2P_PORT = 9333 class Testnet(bitcoin_data.Testnet): - TARGET_MULTIPLIER = SPREAD = 30 + SHARE_PERIOD = 5 # seconds + CHAIN_LENGTH = 1000 # shares + SPREAD = 10 # blocks ROOT_BLOCK = 0xd5070cd4f2987ad2191af71393731a2b143f094f7b84c9e6aa9a6a SCRIPT = '410403ad3dee8ab3d8a9ce5dd2abfbe7364ccd9413df1d279bf1a207849310465b0956e5904b1155ecd17574778f9949589ebfd4fb33ce837c241474a225cf08d85dac'.decode('hex') IDENTIFIER = 0x1ae3479e4eb6700a diff --git a/p2pool/main.py b/p2pool/main.py index 1800624..8b3733d 100644 --- a/p2pool/main.py +++ b/p2pool/main.py @@ -17,20 +17,17 @@ from twisted.web import server import bitcoin.p2p, bitcoin.getwork, bitcoin.data from util import db, expiring_dict, jsonrpc, variable, deferral -import p2pool.p2p as p2p +from . import p2p, worker_interface import p2pool.data as p2pool -import worker_interface try: __version__ = subprocess.Popen(['svnversion', os.path.dirname(sys.argv[0])], stdout=subprocess.PIPE).stdout.read().strip() except: __version__ = 'unknown' -if hasattr(sys, "frozen"): - __file__ = sys.executable - class Chain(object): def __init__(self, chain_id_data): + assert False self.chain_id_data = chain_id_data self.last_p2pool_block_hash = p2pool.chain_id_type.unpack(chain_id_data)['last_p2pool_block_hash'] @@ -188,9 +185,11 @@ def main(args): get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100)) + tracker = p2pool.Tracker() chains = expiring_dict.ExpiringDict(300) def get_chain(chain_id_data): return chains.setdefault(chain_id_data, Chain(chain_id_data)) + # information affecting work that should trigger a long-polling update current_work = variable.Variable(None) # information affecting work that should not trigger a long-polling update @@ -201,26 +200,22 @@ def main(args): @defer.inlineCallbacks def set_real_work(): work, height = yield getwork(bitcoind) - last_p2pool_block_hash = (yield get_last_p2pool_block_hash(work.previous_block, get_block, args.net)) - chain = get_chain(p2pool.chain_id_type.pack(dict(last_p2pool_block_hash=last_p2pool_block_hash, bits=work.bits))) current_work.set(dict( version=work.version, previous_block=work.previous_block, - bits=work.bits, + target=work.target, + height=height + 1, - current_chain=chain, - highest_p2pool_share2=chain.get_highest_share2(), - last_p2pool_block_hash=last_p2pool_block_hash, + + highest_p2pool_share_hash=tracker.get_best_share_hash(), )) current_work2.set(dict( timestamp=work.timestamp, )) - print 'Searching for last p2pool-generated block...' + print 'Initializing work...' yield set_real_work() print ' ...success!' - print ' Matched block %x' % (current_work.value['last_p2pool_block_hash'],) - print # setup p2p logic and join p2pool network @@ -232,7 +227,7 @@ def main(args): share2.flag_shared() def p2p_share(share, peer=None): - if share.hash <= bitcoin.data.bits_to_target(share.header['bits']): + if share.hash <= share.header['target']: print print 'GOT BLOCK! Passing to bitcoind! %x' % (share.hash,) #print share.__dict__ @@ -242,8 +237,7 @@ def main(args): else: print 'No bitcoind connection! Erp!' 
- chain = get_chain(share.chain_id_data) - res = chain.accept(share, args.net) + res = tracker.add_share(share) if res == 'good': share2 = chain.share2s[share.hash] @@ -276,7 +270,7 @@ def main(args): raise ValueError('unknown result from chain.accept - %r' % (res,)) w = dict(current_work.value) - w['highest_p2pool_share2'] = w['current_chain'].get_highest_share2() + w['highest_p2pool_share_hash'] = w['current_chain'].get_highest_share_hash() current_work.set(w) def p2p_share_hash(chain_id_data, hash, peer): @@ -332,7 +326,7 @@ def main(args): current_work=current_work, port=args.p2pool_port, net=args.net, - addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(__file__), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE), + addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE), mode=0 if args.low_bandwidth else 1, preferred_addrs=map(parse, args.p2pool_nodes) + nodes, ) @@ -371,20 +365,21 @@ def main(args): def compute(state): extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()] - generate_tx, shares = p2pool.generate_transaction( - last_p2pool_block_hash=state['last_p2pool_block_hash'], - previous_share2=state['highest_p2pool_share2'], + generate_tx = p2pool.generate_transaction( + tracker=tracker, + previous_share_hash=state['highest_p2pool_share_hash'], new_script=my_script, subsidy=(50*100000000 >> state['height']//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs), nonce=struct.pack("> -m + return n << m + +def clip(x, (low, high)): + if x < low: + return low + elif x > high: + return high + else: + return x
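
Reviewer note (not part of the patch): several format strings above lost their "<" endianness markers in this plain-text export, so the FloatingIntegerType hunk in p2pool/bitcoin/data.py is hard to read here. The sketch below is a minimal, self-contained illustration of the compact "bits" target encoding that FloatingIntegerType reads and writes; the function names and the packed-integer nBits form are assumptions based on standard Bitcoin semantics, not code taken from this commit.

# Assumed helper names; roughly, FloatingIntegerType().truncate_to(x) behaves like
# bits_to_target(target_to_bits(x)) -- encode and decode, losing low-order precision.

def target_to_bits(target):
    # Encode a 256-bit target as 1 size byte + 3 mantissa bytes (Bitcoin-style nBits).
    size = (target.bit_length() + 7) // 8
    if size and (target >> (8 * (size - 1))) >= 0x80:
        size += 1  # keep the mantissa's top bit clear (Bitcoin treats it as a sign bit)
    if size <= 3:
        mantissa = target << (8 * (3 - size))
    else:
        mantissa = target >> (8 * (size - 3))
    return (size << 24) | mantissa

def bits_to_target(bits):
    # Inverse of target_to_bits; anything beyond the 3 mantissa bytes is lost.
    size = bits >> 24
    mantissa = bits & 0x00ffffff
    if size <= 3:
        return mantissa >> (8 * (3 - size))
    return mantissa << (8 * (size - 3))

# Quick sanity check against the well-known Bitcoin genesis difficulty encoding.
assert bits_to_target(0x1d00ffff) == 0xffff * 2**208
assert target_to_bits(0xffff * 2**208) == 0x1d00ffff

The canonical-form check in the patch (raising ValueError when re-encoding the decoded target does not reproduce the original bytes) corresponds to requiring target_to_bits(bits_to_target(bits)) == bits in the sketch above.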