incremented version to 10
diff --git a/p2pool/work.py b/p2pool/work.py
index 7089942..6e879d1 100644
--- a/p2pool/work.py
+++ b/p2pool/work.py
@@ -9,31 +9,21 @@ from twisted.internet import defer
 from twisted.python import log
 
 import bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
-from bitcoin import worker_interface
-from util import jsonrpc, variable, deferral, math, pack
+from bitcoin import helper, script, worker_interface
+from util import forest, jsonrpc, variable, deferral, math, pack
 import p2pool, p2pool.data as p2pool_data
 
 class WorkerBridge(worker_interface.WorkerBridge):
-    def __init__(self, my_pubkey_hash, net, donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, worker_fee, p2p_node, submit_block, set_best_share, shared_share_hashes, block_height_var):
+    def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls, worker_fee):
         worker_interface.WorkerBridge.__init__(self)
         self.recent_shares_ts_work = []
         
+        self.node = node
         self.my_pubkey_hash = my_pubkey_hash
-        self.net = net
         self.donation_percentage = donation_percentage
-        self.bitcoind_work = bitcoind_work
-        self.best_block_header = best_block_header
-        self.best_share_var = best_share_var
-        self.tracker = tracker
-        self.my_share_hashes = my_share_hashes
-        self.my_doa_share_hashes = my_doa_share_hashes
         self.worker_fee = worker_fee
-        self.p2p_node = p2p_node
-        self.submit_block = submit_block
-        self.set_best_share = set_best_share
-        self.shared_share_hashes = shared_share_hashes
-        self.block_height_var = block_height_var
         
+        self.running = True
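+        # self.running is polled by the merged-work loop below and cleared by stop()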
         self.pseudoshare_received = variable.Event()
         self.share_received = variable.Event()
         self.local_rate_monitor = math.RateMonitor(10*60)
@@ -41,16 +31,27 @@ class WorkerBridge(worker_interface.WorkerBridge):
         self.removed_unstales_var = variable.Variable((0, 0, 0))
         self.removed_doa_unstales_var = variable.Variable(0)
         
-        @tracker.verified.removed.watch
+        
+        self.my_share_hashes = set()
+        self.my_doa_share_hashes = set()
+        
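+        # a view over the node's tracker that additionally counts shares produced by this
+        # instance (and their orphan/DOA announcements); used by get_stale_counts()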
+        self.tracker_view = forest.TrackerView(self.node.tracker, forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
+            my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
+            my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
+            my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0,
+            my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0,
+        )))
+        
+        @self.node.tracker.verified.removed.watch
         def _(share):
-            if share.hash in self.my_share_hashes and tracker.is_child_of(share.hash, self.best_share_var.value):
+            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                 assert share.share_data['stale_info'] in [None, 'orphan', 'doa'] # we made these shares in this instance
                 self.removed_unstales_var.set((
                     self.removed_unstales_var.value[0] + 1,
                     self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
                     self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
                 ))
-            if share.hash in self.my_doa_share_hashes and self.tracker.is_child_of(share.hash, self.best_share_var.value):
+            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                 self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)
         
         # MERGED WORK
@@ -59,12 +60,12 @@ class WorkerBridge(worker_interface.WorkerBridge):
         
         @defer.inlineCallbacks
         def set_merged_work(merged_url, merged_userpass):
-            merged_proxy = jsonrpc.Proxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
-            while True:
+            merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
+            while self.running:
                 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 30)(merged_proxy.rpc_getauxblock)()
                 self.merged_work.set(dict(self.merged_work.value, **{auxblock['chainid']: dict(
                     hash=int(auxblock['hash'], 16),
-                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
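+                    # a target of the literal string 'p2pool' is kept as-is here and replaced
+                    # with the actual share target in get_work()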
+                    target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                     merged_proxy=merged_proxy,
                 )}))
                 yield deferral.sleep(1)
@@ -79,10 +80,9 @@ class WorkerBridge(worker_interface.WorkerBridge):
         
         self.current_work = variable.Variable(None)
         def compute_work():
-            t = dict(self.bitcoind_work.value)
-            
-            bb = self.best_block_header.value
-            if bb is not None and bb['previous_block'] == t['previous_block'] and net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
+            t = self.node.bitcoind_work.value
+            bb = self.node.best_block_header.value
+            if bb is not None and bb['previous_block'] == t['previous_block'] and self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
                 print 'Skipping from block %x to block %x!' % (bb['previous_block'],
                     bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)))
                 t = dict(
@@ -90,17 +90,18 @@ class WorkerBridge(worker_interface.WorkerBridge):
                     previous_block=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)),
                     bits=bb['bits'], # not always true
                     coinbaseflags='',
+                    height=t['height'] + 1,
                     time=bb['timestamp'] + 600, # better way?
                     transactions=[],
+                    transaction_fees=[],
                     merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
-                    subsidy=net.PARENT.SUBSIDY_FUNC(self.block_height_var.value),
-                    clock_offset=self.bitcoind_work.value['clock_offset'],
-                    last_update=self.bitcoind_work.value['last_update'],
+                    subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.node.bitcoind_work.value['height']),
+                    last_update=self.node.bitcoind_work.value['last_update'],
                 )
             
             self.current_work.set(t)
-        self.bitcoind_work.changed.watch(lambda _: compute_work())
-        self.best_block_header.changed.watch(lambda _: compute_work())
+        self.node.bitcoind_work.changed.watch(lambda _: compute_work())
+        self.node.best_block_header.changed.watch(lambda _: compute_work())
         compute_work()
         
         self.new_work_event = variable.Event()
@@ -110,13 +111,16 @@ class WorkerBridge(worker_interface.WorkerBridge):
             if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
                 self.new_work_event.happened()
         self.merged_work.changed.watch(lambda _: self.new_work_event.happened())
-        self.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
+        self.node.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
+    
+    def stop(self):
+        self.running = False
     
     def get_stale_counts(self):
         '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
         my_shares = len(self.my_share_hashes)
         my_doa_shares = len(self.my_doa_share_hashes)
-        delta = self.tracker.verified.get_delta_to_last(self.best_share_var.value)
+        delta = self.tracker_view.get_delta_to_last(self.node.best_share_var.value)
         my_shares_in_chain = delta.my_count + self.removed_unstales_var.value[0]
         my_doa_shares_in_chain = delta.my_doa_count + self.removed_doa_unstales_var.value
         orphans_recorded_in_chain = delta.my_orphan_announce_count + self.removed_unstales_var.value[1]
@@ -127,9 +131,7 @@ class WorkerBridge(worker_interface.WorkerBridge):
         
         return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
     
-    def get_user_details(self, request):
-        user = request.getUser() if request.getUser() is not None else ''
-        
+    def get_user_details(self, user):
         desired_pseudoshare_target = None
         if '+' in user:
             user, desired_pseudoshare_difficulty_str = user.rsplit('+', 1)
@@ -150,23 +152,23 @@ class WorkerBridge(worker_interface.WorkerBridge):
             pubkey_hash = self.my_pubkey_hash
         else:
             try:
-                pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.net.PARENT)
+                pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
             except: # XXX blah
                 pubkey_hash = self.my_pubkey_hash
         
         return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
     
-    def preprocess_request(self, request):
-        user, pubkey_hash, desired_share_target, desired_pseudoshare_target = self.get_user_details(request)
+    def preprocess_request(self, user):
+        user, pubkey_hash, desired_share_target, desired_pseudoshare_target = self.get_user_details(user)
         return pubkey_hash, desired_share_target, desired_pseudoshare_target
     
     def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
-        if len(self.p2p_node.peers) == 0 and self.net.PERSIST:
-            raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
-        if self.best_share_var.value is None and self.net.PERSIST:
-            raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
+        if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST:
+            raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
+        if self.node.best_share_var.value is None and self.node.net.PERSIST:
+            raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
         if time.time() > self.current_work.value['last_update'] + 60:
-            raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
+            raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
         
         if self.merged_work.value:
             tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
@@ -181,12 +183,37 @@ class WorkerBridge(worker_interface.WorkerBridge):
             mm_data = ''
             mm_later = []
         
+        tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
+        tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
+        
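+        # choose which share class to generate: keep the previous share's type unless enough
+        # of the recent chain has signalled the successor version (checked below)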
+        if self.node.best_share_var.value is None:
+            share_type = p2pool_data.Share
+        else:
+            previous_share = self.node.tracker.items[self.node.best_share_var.value]
+            previous_share_type = type(previous_share)
+            
+            if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH:
+                share_type = previous_share_type
+            else:
+                successor_type = previous_share_type.SUCCESSOR
+                
+                counts = p2pool_data.get_desired_version_counts(self.node.tracker,
+                    self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10)
+                # Share -> successor only valid if more than 95% of the hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] are for the new version
+                if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100:
+                    share_type = successor_type
+                else:
+                    share_type = previous_share_type
+        
         if True:
-            share_info, generate_tx = p2pool_data.Share.generate_transaction(
-                tracker=self.tracker,
+            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
+                tracker=self.node.tracker,
                 share_data=dict(
-                    previous_share_hash=self.best_share_var.value,
-                    coinbase=(mm_data + self.current_work.value['coinbaseflags'])[:100],
+                    previous_share_hash=self.node.best_share_var.value,
+                    coinbase=(script.create_push_script([
+                        self.current_work.value['height'],
+                        ] + ([mm_data] if mm_data else []) + [
+                    ]) + self.current_work.value['coinbaseflags'])[:100],
                     nonce=random.randrange(2**32),
                     pubkey_hash=pubkey_hash,
                     subsidy=self.current_work.value['subsidy'],
@@ -196,15 +223,23 @@ class WorkerBridge(worker_interface.WorkerBridge):
                         'doa' if doas > doas_recorded_in_chain else
                         None
                     )(*self.get_stale_counts()),
-                    desired_version=3,
+                    desired_version=10,
                 ),
                 block_target=self.current_work.value['bits'].target,
-                desired_timestamp=int(time.time() - self.current_work.value['clock_offset']),
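+                # local time rounded to the nearest second; the old clock_offset adjustment was dropped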
+                desired_timestamp=int(time.time() + 0.5),
                 desired_target=desired_share_target,
                 ref_merkle_link=dict(branch=[], index=0),
-                net=self.net,
+                desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']),
+                net=self.node.net,
+                known_txs=tx_map,
+                base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['height']),
             )
         
+        packed_gentx = bitcoin_data.tx_type.pack(gentx)
+        other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
+        
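+        # aux works whose target was the 'p2pool' placeholder now get the actual share target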
+        mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
+        
         if desired_pseudoshare_target is None:
             target = 2**256-1
             if len(self.recent_shares_ts_work) == 50:
@@ -214,31 +249,27 @@ class WorkerBridge(worker_interface.WorkerBridge):
         else:
             target = desired_pseudoshare_target
         target = max(target, share_info['bits'].target)
-        for aux_work in self.merged_work.value.itervalues():
+        for aux_work, index, hashes in mm_later:
             target = max(target, aux_work['target'])
-        target = math.clip(target, self.net.PARENT.SANE_TARGET_RANGE)
-        
-        transactions = [generate_tx] + list(self.current_work.value['transactions'])
-        packed_generate_tx = bitcoin_data.tx_type.pack(generate_tx)
-        merkle_root = bitcoin_data.check_merkle_link(bitcoin_data.hash256(packed_generate_tx), self.current_work.value['merkle_link'])
+        target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)
         
         getwork_time = time.time()
         lp_count = self.new_work_event.times
-        merkle_link = self.current_work.value['merkle_link']
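+        # merkle link for position 0, where the hash of the (possibly nonce-modified) gentx is later substituted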
+        merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0)
         
         print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
             bitcoin_data.target_to_difficulty(target),
             bitcoin_data.target_to_difficulty(share_info['bits'].target),
-            self.current_work.value['subsidy']*1e-8, self.net.PARENT.SYMBOL,
+            self.current_work.value['subsidy']*1e-8, self.node.net.PARENT.SYMBOL,
             len(self.current_work.value['transactions']),
         )
         
-        bits = self.current_work.value['bits']
-        previous_block = self.current_work.value['previous_block']
-        ba = bitcoin_getwork.BlockAttempt(
-            version=self.current_work.value['version'],
+        ba = dict(
+            version=min(self.current_work.value['version'], 2),
             previous_block=self.current_work.value['previous_block'],
-            merkle_root=merkle_root,
+            merkle_link=merkle_link,
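+            # the packed gentx ends with a 4-byte nonce placeholder followed by the 4-byte
+            # lock_time; coinb1/coinb2 split around the nonce so the worker can vary it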
+            coinb1=packed_gentx[:-4-4],
+            coinb2=packed_gentx[-4:],
             timestamp=self.current_work.value['time'],
             bits=self.current_work.value['bits'],
             share_target=target,
@@ -246,23 +277,26 @@ class WorkerBridge(worker_interface.WorkerBridge):
         
         received_header_hashes = set()
         
-        def got_response(header, request):
+        def got_response(header, user, last_txout_nonce):
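+            # if the miner used a nonzero last_txout_nonce, splice it into the reserved 4 bytes and re-unpack the gentx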
+            new_packed_gentx = packed_gentx[:-4-4] + pack.IntType(32).pack(last_txout_nonce) + packed_gentx[-4:] if last_txout_nonce != 0 else packed_gentx
+            new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if last_txout_nonce != 0 else gentx
+            
             header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
-            pow_hash = self.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
+            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
             try:
                 if pow_hash <= header['bits'].target or p2pool.DEBUG:
-                    self.submit_block(dict(header=header, txs=transactions), ignore_failure=False)
+                    helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net)
                     if pow_hash <= header['bits'].target:
                         print
-                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
+                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                         print
             except:
                 log.err(None, 'Error while processing potential block:')
             
-            user, _, _, _ = self.get_user_details(request)
-            assert header['merkle_root'] == merkle_root
-            assert header['previous_block'] == previous_block
-            assert header['bits'] == bits
+            user, _, _, _ = self.get_user_details(user)
+            assert header['previous_block'] == ba['previous_block']
+            assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link)
+            assert header['bits'] == ba['bits']
             
             on_time = self.new_work_event.times == lp_count
             
@@ -273,7 +307,7 @@ class WorkerBridge(worker_interface.WorkerBridge):
                             pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                             bitcoin_data.aux_pow_type.pack(dict(
                                 merkle_tx=dict(
-                                    tx=transactions[0],
+                                    tx=new_gentx,
                                     block_hash=header_hash,
                                     merkle_link=merkle_link,
                                 ),
@@ -282,7 +316,7 @@ class WorkerBridge(worker_interface.WorkerBridge):
                             )).encode('hex'),
                         )
                         @df.addCallback
-                        def _(result):
+                        def _(result, aux_work=aux_work):
                             if result != (pow_hash <= aux_work['target']):
                                 print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                             else:
@@ -294,15 +328,10 @@ class WorkerBridge(worker_interface.WorkerBridge):
                     log.err(None, 'Error while processing merged mining POW:')
             
             if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
-                min_header = dict(header);del min_header['merkle_root']
-                hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.Share.gentx_before_refhash)
-                share = p2pool_data.Share(self.net, None, dict(
-                    min_header=min_header, share_info=share_info, hash_link=hash_link,
-                    ref_merkle_link=dict(branch=[], index=0),
-                ), merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
+                share = get_share(header, last_txout_nonce)
                 
                 print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
-                    request.getUser(),
+                    user,
                     p2pool_data.format_hash(share.hash),
                     p2pool_data.format_hash(share.previous_hash),
                     time.time() - getwork_time,
@@ -312,27 +341,23 @@ class WorkerBridge(worker_interface.WorkerBridge):
                 if not on_time:
                     self.my_doa_share_hashes.add(share.hash)
                 
-                self.tracker.add(share)
-                if not p2pool.DEBUG:
-                    self.tracker.verified.add(share)
-                self.set_best_share()
+                self.node.tracker.add(share)
+                self.node.set_best_share()
                 
                 try:
-                    if pow_hash <= header['bits'].target or p2pool.DEBUG:
-                        for peer in self.p2p_node.peers.itervalues():
-                            peer.sendShares([share])
-                        self.shared_share_hashes.add(share.hash)
+                    if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None:
+                        self.node.p2p_node.broadcast_share(share.hash)
                 except:
                     log.err(None, 'Error forwarding block solution:')
                 
                 self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time)
             
             if pow_hash > target:
-                print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
+                print 'Worker %s submitted share with hash > target:' % (user,)
                 print '    Hash:   %56x' % (pow_hash,)
                 print '    Target: %56x' % (target,)
             elif header_hash in received_header_hashes:
-                print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
+                print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,)
             else:
                 received_header_hashes.add(header_hash)