removed getblockcount polling, made unnecessary by having height in getblocktemplate...
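For reference, a minimal sketch of the idea behind this change, assuming a p2pool-style jsonrpc.Proxy named bitcoind (the same rpc_* call pattern used by the merged-mining code below): getblocktemplate already reports the height of the block being assembled, so a separate rpc_getblockcount poll feeding a block_height_var is redundant.

    import time
    from twisted.internet import defer

    @defer.inlineCallbacks
    def poll_work(bitcoind):
        # getblocktemplate includes the height of the block this template
        # would produce (chain tip + 1), so no separate getblockcount call
        # or block_height_var is needed.
        work = yield bitcoind.rpc_getblocktemplate(dict(mode='template'))
        defer.returnValue(dict(
            height=work['height'],
            previous_block=int(work['previousblockhash'], 16),
            subsidy=work['coinbasevalue'],
            last_update=time.time(),
        ))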
diff --git a/p2pool/work.py b/p2pool/work.py
index 853862a..b389100 100644
--- a/p2pool/work.py
+++ b/p2pool/work.py
@@ -9,12 +9,12 @@ from twisted.internet import defer
 from twisted.python import log
 
 import bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
-from bitcoin import worker_interface
+from bitcoin import script, worker_interface
 from util import jsonrpc, variable, deferral, math, pack
 import p2pool, p2pool.data as p2pool_data
 
 class WorkerBridge(worker_interface.WorkerBridge):
-    def __init__(self, my_pubkey_hash, net, donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, worker_fee, p2p_node, submit_block, set_best_share, shared_share_hashes, block_height_var):
+    def __init__(self, my_pubkey_hash, net, donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, worker_fee, p2p_node, submit_block, set_best_share, broadcast_share):
         worker_interface.WorkerBridge.__init__(self)
         self.recent_shares_ts_work = []
         
@@ -31,8 +31,7 @@ class WorkerBridge(worker_interface.WorkerBridge):
         self.p2p_node = p2p_node
         self.submit_block = submit_block
         self.set_best_share = set_best_share
-        self.shared_share_hashes = shared_share_hashes
-        self.block_height_var = block_height_var
+        self.broadcast_share = broadcast_share
         
         self.pseudoshare_received = variable.Event()
         self.share_received = variable.Event()
@@ -61,10 +60,10 @@ class WorkerBridge(worker_interface.WorkerBridge):
         def set_merged_work(merged_url, merged_userpass):
             merged_proxy = jsonrpc.Proxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
             while True:
-                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
+                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 30)(merged_proxy.rpc_getauxblock)()
                 self.merged_work.set(dict(self.merged_work.value, **{auxblock['chainid']: dict(
                     hash=int(auxblock['hash'], 16),
-                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
+                    target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                     merged_proxy=merged_proxy,
                 )}))
                 yield deferral.sleep(1)
@@ -79,8 +78,7 @@ class WorkerBridge(worker_interface.WorkerBridge):
         
         self.current_work = variable.Variable(None)
         def compute_work():
-            t = dict(self.bitcoind_work.value)
-            
+            t = self.bitcoind_work.value
             bb = self.best_block_header.value
             if bb is not None and bb['previous_block'] == t['previous_block'] and net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
                 print 'Skipping from block %x to block %x!' % (bb['previous_block'],
@@ -90,12 +88,12 @@ class WorkerBridge(worker_interface.WorkerBridge):
                     previous_block=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)),
                     bits=bb['bits'], # not always true
                     coinbaseflags='',
+                    height=t['height'] + 1,
                     time=bb['timestamp'] + 600, # better way?
                     transactions=[],
                     merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
-                    subsidy=net.PARENT.SUBSIDY_FUNC(self.block_height_var.value),
-                    clock_offset=self.current_work.value['clock_offset'],
-                    last_update=self.current_work.value['last_update'],
+                    subsidy=net.PARENT.SUBSIDY_FUNC(self.bitcoind_work.value['height']),
+                    last_update=self.bitcoind_work.value['last_update'],
                 )
             
             self.current_work.set(t)
@@ -162,11 +160,11 @@ class WorkerBridge(worker_interface.WorkerBridge):
     
     def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
         if len(self.p2p_node.peers) == 0 and self.net.PERSIST:
-            raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
+            raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
         if self.best_share_var.value is None and self.net.PERSIST:
-            raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
+            raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
         if time.time() > self.current_work.value['last_update'] + 60:
-            raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
+            raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
         
         if self.merged_work.value:
             tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
@@ -181,12 +179,33 @@ class WorkerBridge(worker_interface.WorkerBridge):
             mm_data = ''
             mm_later = []
         
+        tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
+        tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
+        
+        share_type = p2pool_data.NewShare
+        if self.best_share_var.value is not None:
+            previous_share = self.tracker.items[self.best_share_var.value]
+            if isinstance(previous_share, p2pool_data.Share):
+                # Share -> NewShare is only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] signal the new version
+                if self.tracker.get_height(previous_share.hash) < self.net.CHAIN_LENGTH:
+                    share_type = p2pool_data.Share
+                elif time.time() < 1351383661 and self.net.NAME == 'bitcoin':
+                    share_type = p2pool_data.Share
+                else:
+                    counts = p2pool_data.get_desired_version_counts(self.tracker,
+                        self.tracker.get_nth_parent_hash(previous_share.hash, self.net.CHAIN_LENGTH*9//10), self.net.CHAIN_LENGTH//10)
+                    if counts.get(p2pool_data.NewShare.VERSION, 0) < sum(counts.itervalues())*95//100:
+                        share_type = p2pool_data.Share
+        
         if True:
-            share_info, generate_tx = p2pool_data.Share.generate_transaction(
+            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
                 tracker=self.tracker,
                 share_data=dict(
                     previous_share_hash=self.best_share_var.value,
-                    coinbase=(mm_data + self.current_work.value['coinbaseflags'])[:100],
+                    coinbase=(script.create_push_script([
+                        self.current_work.value['height'],
+                        ] + ([mm_data] if mm_data else []) + [
+                    ]) + self.current_work.value['coinbaseflags'])[:100],
                     nonce=random.randrange(2**32),
                     pubkey_hash=pubkey_hash,
                     subsidy=self.current_work.value['subsidy'],
@@ -196,15 +215,21 @@ class WorkerBridge(worker_interface.WorkerBridge):
                         'doa' if doas > doas_recorded_in_chain else
                         None
                     )(*self.get_stale_counts()),
-                    desired_version=3,
+                    desired_version=p2pool_data.NewShare.VERSION,
                 ),
                 block_target=self.current_work.value['bits'].target,
-                desired_timestamp=int(time.time() - self.current_work.value['clock_offset']),
+                desired_timestamp=int(time.time() + 0.5),
                 desired_target=desired_share_target,
                 ref_merkle_link=dict(branch=[], index=0),
+                desired_other_transaction_hashes=tx_hashes,
                 net=self.net,
+                known_txs=tx_map,
             )
         
+        transactions = [gentx] + [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
+        
+        mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
+        
         if desired_pseudoshare_target is None:
             target = 2**256-1
             if len(self.recent_shares_ts_work) == 50:
@@ -214,16 +239,13 @@ class WorkerBridge(worker_interface.WorkerBridge):
         else:
             target = desired_pseudoshare_target
         target = max(target, share_info['bits'].target)
-        for aux_work in self.merged_work.value.itervalues():
+        for aux_work, index, hashes in mm_later:
             target = max(target, aux_work['target'])
         target = math.clip(target, self.net.PARENT.SANE_TARGET_RANGE)
         
-        transactions = [generate_tx] + list(self.current_work.value['transactions'])
-        packed_generate_tx = bitcoin_data.tx_type.pack(generate_tx)
-        merkle_root = bitcoin_data.check_merkle_link(bitcoin_data.hash256(packed_generate_tx), self.current_work.value['merkle_link'])
-        
         getwork_time = time.time()
-        merkle_link = self.current_work.value['merkle_link']
+        lp_count = self.new_work_event.times
+        merkle_link = bitcoin_data.calculate_merkle_link([bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in transactions], 0)
         
         print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
             bitcoin_data.target_to_difficulty(target),
@@ -232,12 +254,10 @@ class WorkerBridge(worker_interface.WorkerBridge):
             len(self.current_work.value['transactions']),
         )
         
-        bits = self.current_work.value['bits']
-        previous_block = self.current_work.value['previous_block']
         ba = bitcoin_getwork.BlockAttempt(
-            version=self.current_work.value['version'],
+            version=min(self.current_work.value['version'], 2),
             previous_block=self.current_work.value['previous_block'],
-            merkle_root=merkle_root,
+            merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(bitcoin_data.tx_type.pack(transactions[0])), merkle_link),
             timestamp=self.current_work.value['time'],
             bits=self.current_work.value['bits'],
             share_target=target,
@@ -259,11 +279,11 @@ class WorkerBridge(worker_interface.WorkerBridge):
                 log.err(None, 'Error while processing potential block:')
             
             user, _, _, _ = self.get_user_details(request)
-            assert header['merkle_root'] == merkle_root
-            assert header['previous_block'] == previous_block
-            assert header['bits'] == bits
+            assert header['previous_block'] == ba.previous_block
+            assert header['merkle_root'] == ba.merkle_root
+            assert header['bits'] == ba.bits
             
-            on_time = self.best_share_var.value == share_info['share_data']['previous_share_hash']
+            on_time = self.new_work_event.times == lp_count
             
             for aux_work, index, hashes in mm_later:
                 try:
@@ -281,7 +301,7 @@ class WorkerBridge(worker_interface.WorkerBridge):
                             )).encode('hex'),
                         )
                         @df.addCallback
-                        def _(result):
+                        def _(result, aux_work=aux_work):
                             if result != (pow_hash <= aux_work['target']):
                                 print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                             else:
@@ -293,12 +313,7 @@ class WorkerBridge(worker_interface.WorkerBridge):
                     log.err(None, 'Error while processing merged mining POW:')
             
             if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
-                min_header = dict(header);del min_header['merkle_root']
-                hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.Share.gentx_before_refhash)
-                share = p2pool_data.Share(self.net, None, dict(
-                    min_header=min_header, share_info=share_info, hash_link=hash_link,
-                    ref_merkle_link=dict(branch=[], index=0),
-                ), merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
+                share = get_share(header, transactions)
                 
                 print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                     request.getUser(),
@@ -318,9 +333,7 @@ class WorkerBridge(worker_interface.WorkerBridge):
                 
                 try:
                     if pow_hash <= header['bits'].target or p2pool.DEBUG:
-                        for peer in self.p2p_node.peers.itervalues():
-                            peer.sendShares([share])
-                        self.shared_share_hashes.add(share.hash)
+                        self.broadcast_share(share.hash)
                 except:
                     log.err(None, 'Error forwarding block solution:')