# fixes for v3 blocks
# p2pool/work.py (from p2pool.git)
1 from __future__ import division
2
3 import base64
4 import random
5 import sys
6 import time
7
8 from twisted.internet import defer
9 from twisted.python import log
10
11 import bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
12 from bitcoin import helper, script, worker_interface
13 from util import forest, jsonrpc, variable, deferral, math, pack
14 import p2pool, p2pool.data as p2pool_data
15
class WorkerBridge(worker_interface.WorkerBridge):
    """Connects attached miners to the p2pool node.

    Builds block-template work for miners from bitcoind's current work and
    the p2pool share chain, records the shares and pseudoshares miners
    submit, and forwards proofs-of-work to any configured merged-mining
    (auxiliary) chains.
    """

    # Bytes of coinbase scriptSig space reserved for the miner's extra nonce
    # (the gap between coinb1 and coinb2 handed out in get_work()).
    COINBASE_NONCE_LENGTH = 4
    
    def __init__(self, node, my_pubkey, donation_percentage, merged_urls, worker_fee):
        """
        node -- p2pool node object; supplies tracker, bitcoind_work,
            best_block_header, best_share_var, pow_bits, pow_subsidy,
            p2p_node, factory, bitcoind and net
        my_pubkey -- public key credited with this node's generated shares
        donation_percentage -- percentage donated (0..100); encoded into
            share_data as a 16-bit fraction
        merged_urls -- iterable of (url, userpass) pairs of merged-mining
            daemons to poll with getauxblock
        worker_fee -- fee percentage charged to workers (stored; consumed
            elsewhere -- not used in this file's visible code)
        """
        worker_interface.WorkerBridge.__init__(self)
        # Up to 50 recent (timestamp, work) pseudoshare entries; used in
        # get_work() to auto-size pseudoshare targets from observed hashrate.
        self.recent_shares_ts_work = []
        
        self.node = node
        self.my_pubkey = my_pubkey
        self.donation_percentage = donation_percentage
        self.worker_fee = worker_fee
        
        self.running = True
        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10*60) # local work rate over the last 10 minutes
        
        # Running totals for our shares that were removed from the tracker:
        # a (count, orphan_announced, doa_announced) triple, plus a separate
        # count of removed dead-on-arrival shares. Folded into the numbers
        # returned by get_stale_counts().
        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)
        
        
        # Hashes of shares produced by this instance, and the subset that
        # were dead on arrival (arrived after a new-work event).
        self.my_share_hashes = set()
        self.my_doa_share_hashes = set()
        
        # Tracker view accumulating per-share deltas about our own shares so
        # get_stale_counts() can total them along the current best chain.
        self.tracker_view = forest.TrackerView(self.node.tracker, forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
            my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
            my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
            my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0,
            my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0,
        )))
        
        # When one of our shares falls off the tracker, move its statistics
        # into the removed_* counters so get_stale_counts() stays accurate.
        @self.node.tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                assert share.share_data['stale_info'] in [None, 'orphan', 'doa'] # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)
        
        # MERGED WORK
        
        self.merged_work = variable.Variable({})
        
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            # Poll this merged daemon's getauxblock for as long as the bridge
            # is running, keeping self.merged_work current for its chain id.
            merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
            while self.running:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 30)(merged_proxy.rpc_getauxblock)()
                self.merged_work.set(dict(self.merged_work.value, **{auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @self.merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # COMBINE WORK
        

        self.current_work = variable.Variable(None)
        def compute_work():
            # Normally pass through bitcoind's work template. But if we
            # already know a valid header extending bitcoind's tip, build a
            # provisional empty template on top of it instead of waiting for
            # bitcoind to catch up.
            t = self.node.bitcoind_work.value
            bb = self.node.best_block_header.value

#            subsidy = self.node.net.PARENT.SUBSIDY_FUNC(self.node.pow_bits.target)

            if bb is not None and bb['previous_block'] == t['previous_block'] and self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x!' % (bb['previous_block'],
                    self.node.net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(bb)))
                t = dict(
                    version=bb['version'],
                    previous_block=self.node.net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(bb)),
                    bits=self.node.pow_bits, # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=t['time'] + 30, # better way?
                    transactions=[],
                    transaction_fees=[],
                    txn_timestamp=0,
                    merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
                    subsidy=self.node.pow_subsidy,
                    last_update=self.node.bitcoind_work.value['last_update'],
                )
            
            self.current_work.set(t)
        self.node.bitcoind_work.changed.watch(lambda _: compute_work())
        self.node.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()
        
        self.new_work_event = variable.Event()
        @self.current_work.transitioned.watch
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
                self.new_work_event.happened()
        self.merged_work.changed.watch(lambda _: self.new_work_event.happened())
        self.node.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
    
    def stop(self):
        """Stop the merged-work polling loops (they check self.running)."""
        self.running = False
    
    def get_stale_counts(self):
        '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
        my_shares = len(self.my_share_hashes)
        my_doa_shares = len(self.my_doa_share_hashes)
        delta = self.tracker_view.get_delta_to_last(self.node.best_share_var.value)
        # "in chain" counts include shares already removed from the tracker.
        my_shares_in_chain = delta.my_count + self.removed_unstales_var.value[0]
        my_doa_shares_in_chain = delta.my_doa_count + self.removed_doa_unstales_var.value
        orphans_recorded_in_chain = delta.my_orphan_announce_count + self.removed_unstales_var.value[1]
        doas_recorded_in_chain = delta.my_dead_announce_count + self.removed_unstales_var.value[2]
        
        my_shares_not_in_chain = my_shares - my_shares_in_chain
        my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
        
        return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
    
    def get_user_details(self, user):
        """Parse a worker username of the form name[/share_diff][+pseudoshare_diff].

        The '+' suffix is split off first, then '/'. A suffix that does not
        parse as a float is silently ignored (intentional best-effort).
        Returns (user, pubkey, desired_share_target, desired_pseudoshare_target);
        desired_pseudoshare_target is None when not requested, and
        desired_share_target defaults to the easiest possible target (2**256-1).
        """
        desired_pseudoshare_target = None
        if '+' in user:
            user, desired_pseudoshare_difficulty_str = user.rsplit('+', 1)
            try:
                desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(desired_pseudoshare_difficulty_str))
            except:
                pass
        
        desired_share_target = 2**256 - 1
        if '/' in user:
            user, min_diff_str = user.rsplit('/', 1)
            try:
                desired_share_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
            except:
                pass
        
        # Payouts always credit this node's own pubkey, whatever the username.
        pubkey = self.my_pubkey
        
        return user, pubkey, desired_share_target, desired_pseudoshare_target
    
    def preprocess_request(self, user):
        """Map a worker username to (pubkey, desired_share_target, desired_pseudoshare_target)."""
        user, pubkey, desired_share_target, desired_pseudoshare_target = self.get_user_details(user)
        return pubkey, desired_share_target, desired_pseudoshare_target
    
    def get_work(self, pubkey, desired_share_target, desired_pseudoshare_target):
        """Build one unit of work for a miner.

        Returns (ba, got_response): ba is a dict describing the block
        template given to the miner (header fields plus coinb1/coinb2 with a
        COINBASE_NONCE_LENGTH-byte gap for the miner's nonce); got_response
        processes the miner's solved header, submitting bitcoin blocks and
        merged-mining proofs when good enough, recording shares and
        pseudoshares, and returning whether the solution was on time.

        Raises a jsonrpc error when the node is in no state to hand out work
        (no peers, still downloading shares, or bitcoind contact lost).
        """
        if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
        if self.node.best_share_var.value is None and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
        if time.time() > self.current_work.value['last_update'] + 60:
            raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
        
        # Fold any merged-mining chains into an auxpow merkle tree whose root
        # is committed to in the coinbase (mm_data).
        if self.merged_work.value:
            tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
            mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
            mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                size=size,
                nonce=0,
            ))
            mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in self.merged_work.value.iteritems()]
        else:
            mm_data = ''
            mm_later = []
        
        tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
        tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
        txn_timestamp = self.current_work.value['txn_timestamp']
        
        #print
        #print txn_timestamp
        #print
        
        # Pick the share type: follow the previous share's type, upgrading to
        # its SUCCESSOR only once the chain is long enough and the voting
        # threshold below is met.
        if self.node.best_share_var.value is None:
            share_type = p2pool_data.Share
        else:
            previous_share = self.node.tracker.items[self.node.best_share_var.value]
            previous_share_type = type(previous_share)
            
            if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH:
                share_type = previous_share_type
            else:
                successor_type = previous_share_type.SUCCESSOR
                
                counts = p2pool_data.get_desired_version_counts(self.node.tracker,
                    self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10)
                # Share -> NewShare only valid if >95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] desire the new version
                if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100:
                    share_type = successor_type
                else:
                    share_type = previous_share_type
        
        if True:
            subsidy = self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['bits'].target)
            desired_timestamp = int(time.time() + 0.5)

            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
                tracker=self.node.tracker,
                share_data=dict(
                    previous_share_hash=self.node.best_share_var.value,
                    coinbase=(script.create_push_script([
                        self.current_work.value['height'],
                        ] + ([mm_data] if mm_data else []) + [
                    ]) + self.current_work.value['coinbaseflags'])[:100],
                    nonce=random.randrange(2**32),
                    pubkey=pubkey,
                    subsidy=self.current_work.value['subsidy'],
                    donation=math.perfect_round(65535*self.donation_percentage/100),
                    stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                        'orphan' if orphans > orphans_recorded_in_chain else
                        'doa' if doas > doas_recorded_in_chain else
                        None
                    )(*self.get_stale_counts()),
                    desired_version=14,
                ),
                block_target=self.current_work.value['bits'].target,
                # keep the share timestamp strictly ahead of the newest tx timestamp
                desired_timestamp=desired_timestamp if txn_timestamp < desired_timestamp else txn_timestamp + 1,
                desired_target=desired_share_target,
                ref_merkle_link=dict(branch=[], index=0),
                desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']),
                net=self.node.net,
                known_txs=tx_map,
                base_subsidy=subsidy
            )
        
        packed_gentx = bitcoin_data.tx_type.pack(gentx)
        other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
        
        # Resolve any 'p2pool' sentinel aux targets to the actual share target.
        mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
        
        # Pseudoshare target: user-requested, or auto-sized so pseudoshares
        # arrive at a steady rate given our recent local hashrate; never
        # easier than the share target or any merged-mining target.
        if desired_pseudoshare_target is None:
            target = 2**256-1
            if len(self.recent_shares_ts_work) == 50:
                hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                if hash_rate:
                    target = min(target, int(2**256/hash_rate))
        else:
            target = desired_pseudoshare_target
        target = max(target, share_info['bits'].target)
        for aux_work, index, hashes in mm_later:
            target = max(target, aux_work['target'])
        target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)
        
        getwork_time = time.time()
        lp_count = self.new_work_event.times # used below to detect stale (dead-on-arrival) submissions
        merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0)
        
        # NOTE(review): the *1e-6 scaling suggests subsidy is in micro-units
        # of the parent coin -- confirm against the parent net definition.
        print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
            bitcoin_data.target_to_difficulty(target),
            bitcoin_data.target_to_difficulty(share_info['bits'].target),
            self.current_work.value['subsidy']*1e-6, self.node.net.PARENT.SYMBOL,
            len(self.current_work.value['transactions']),
        )
        
        ba = dict(
            version=min(self.current_work.value['version'], 3), # cap at v3 blocks
            previous_block=self.current_work.value['previous_block'],
            merkle_link=merkle_link,
            # coinb1/coinb2 bracket the 4-byte coinbase-nonce gap; the last 4
            # bytes of packed_gentx (lock_time) go in coinb2
            coinb1=packed_gentx[:-4-4],
            coinb2=packed_gentx[-4:],
            timestamp=gentx['timestamp'],
            bits=self.current_work.value['bits'],
            share_target=target,
        )
        
        received_header_hashes = set()
        
        def got_response(header, user, coinbase_nonce):
            """Handle a solved header from the miner; returns True if on time."""
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH == 4
            # Splice the miner's nonce back into the coinbase transaction.
            new_packed_gentx = packed_gentx[:-4-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx
            new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx
            
            header_hash = self.node.net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
            try:
                # Sanity-check header timestamp against the coinbase timestamp
                # before considering block submission.
                if header['timestamp'] > new_gentx['timestamp'] + 3600:
                    print
                    print header['timestamp'], '>', new_gentx['timestamp'] + 3600
                    print 'Coinbase timestamp is too early!'
                    print

                    return

                if header['timestamp'] < new_gentx['timestamp']:
                    print
                    print header['timestamp'], '<', new_gentx['timestamp']
                    print 'Block header timestamp is before coinbase timestamp!'
                    print
                    return

                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions, signature=''), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                        print
            except:
                log.err(None, 'Error while processing potential block:')
            
            user, _, _, _ = self.get_user_details(user)
            assert header['previous_block'] == ba['previous_block']
            assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link)
            assert header['bits'] == ba['bits']
            
            # On time iff no new-work (long-polling) event fired since get_work().
            on_time = self.new_work_event.times == lp_count
            
            # Submit auxpow proofs to every merged chain this solution satisfies.
            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                            pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                            bitcoin_data.aux_pow_type.pack(dict(
                                merkle_tx=dict(
                                    tx=new_gentx,
                                    block_hash=header_hash,
                                    merkle_link=merkle_link,
                                ),
                                merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                parent_block_header=header,
                            )).encode('hex'),
                        )
                        @df.addCallback
                        def _(result, aux_work=aux_work): # aux_work bound as default: avoids late-binding closure bug
                            if result != (pow_hash <= aux_work['target']):
                                print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (result,)
                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')
            
            # Good enough for a p2pool share (and not a duplicate header)?
            if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
                share = get_share(header, pack.IntType(32).unpack(coinbase_nonce))
                
                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)
                
                self.node.tracker.add(share)
                self.node.set_best_share()
                
                try:
                    if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')
                
                self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time)
            
            # Pseudoshare accounting (hashrate estimation / per-worker stats).
            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (user,)
                print '    Hash:   %56x' % (pow_hash,)
                print '    Target: %56x' % (target,)
            elif header_hash in received_header_hashes:
                print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,)
            else:
                received_header_hashes.add(header_hash)
                
                self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
                self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user))
            
            return on_time
        
        return ba, got_response