Add timestamp offset for block header
[p2pool.git] / p2pool / work.py
1 from __future__ import division
2
3 import base64
4 import random
5 import re
6 import sys
7 import time
8
9 from twisted.internet import defer
10 from twisted.python import log
11
12 import bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
13 from bitcoin import helper, script, worker_interface
14 from util import forest, jsonrpc, variable, deferral, math, pack
15 import p2pool, p2pool.data as p2pool_data
16
class WorkerBridge(worker_interface.WorkerBridge):
    """Bridge between the p2pool node and attached miners.

    Builds getwork jobs from the node's current bitcoind work (plus any
    merged-mining aux work), hands them to workers, and processes
    submitted proof-of-work: forwarding solved blocks to bitcoind,
    submitting merged-mining solutions, and adding p2pool shares to the
    tracker as appropriate.
    """
    # Width (in bytes) of the miner-controlled nonce spliced into the
    # coinbase transaction; see the coinb1/coinb2 split in get_work().
    COINBASE_NONCE_LENGTH = 8
    
    def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls, worker_fee):
        """Set up miner-facing state and wire change-watchers into *node*.

        node -- the p2pool Node whose tracker/bitcoind work this bridge serves
        my_pubkey_hash -- operator payout pubkey hash (fee and fallback address)
        donation_percentage -- percent of income donated (0-100)
        merged_urls -- iterable of (url, userpass) merged-mining daemons to poll
        worker_fee -- percent of getwork requests paid to my_pubkey_hash
        """
        worker_interface.WorkerBridge.__init__(self)
        # (timestamp, attempts) samples of recent pseudoshares; capped at 50
        # entries in got_response() and read by _estimate_local_hash_rate().
        self.recent_shares_ts_work = []
        
        self.node = node
        self.my_pubkey_hash = my_pubkey_hash
        self.donation_percentage = donation_percentage
        self.worker_fee = worker_fee
        
        self.net = self.node.net.PARENT
        self.running = True  # cleared by stop() to end the merged-work pollers
        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        # sliding 10-minute windows of work datums, per user and per address
        self.local_rate_monitor = math.RateMonitor(10*60)
        self.local_addr_rate_monitor = math.RateMonitor(10*60)
        
        # counts of our shares already pruned from the tracker:
        # (total, orphaned, dead-on-arrival) and pruned-DOA total
        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)
        
        
        # hashes of shares produced by this instance (and the DOA subset)
        self.my_share_hashes = set()
        self.my_doa_share_hashes = set()
        
        # tracker view annotated with per-share "is mine" counters so that
        # get_stale_counts() can tally our shares along the best chain
        self.tracker_view = forest.TrackerView(self.node.tracker, forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
            my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
            my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
            my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0,
            my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0,
        )))
        
        @self.node.tracker.verified.removed.watch
        def _(share):
            # When one of our shares is pruned off the chain's tail, fold its
            # counts into removed_*_var so get_stale_counts() stays accurate.
            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                assert share.share_data['stale_info'] in [None, 'orphan', 'doa'] # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)
        
        # MERGED WORK
        
        self.merged_work = variable.Variable({})
        
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            # Poll the merged-mining daemon for aux work about once a second
            # until stop() clears self.running.
            merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
            while self.running:
                auxblock = yield deferral.retry('Error while calling merged getauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_getauxblock)()
                self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @self.merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # COMBINE WORK
        
        self.current_work = variable.Variable(None)
        def compute_work():
            t = self.node.bitcoind_work.value
            bb = self.node.best_block_header.value
            # If we already know a header that extends bitcoind's tip and
            # satisfies the network target, start mining on top of it without
            # waiting for bitcoind to catch up. NOTE: proof-of-work here is
            # scrypt (this is a scrypt-coin port of p2pool), while the display
            # hash printed below is the plain double-SHA256 block hash.
            if bb is not None and bb['previous_block'] == t['previous_block'] and bitcoin_data.scrypt(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x!' % (bb['previous_block'],
                    bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)))
                t = dict(
                    version=bb['version'],
                    previous_block=bitcoin_data.scrypt(bitcoin_data.block_header_type.pack(bb)),
                    bits=self.node.pow_bits, # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=t['time'] + 30, # better way?
                    transactions=[],
                    transaction_fees=[],
                    txn_timestamp=0,
                    merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
                    subsidy=self.node.pow_subsidy,
                    last_update=self.node.bitcoind_work.value['last_update'],
                )
            
            self.current_work.set(t)
        self.node.bitcoind_work.changed.watch(lambda _: compute_work())
        self.node.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()
        
        self.new_work_event = variable.Event()
        @self.current_work.transitioned.watch
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
                self.new_work_event.happened()
        self.merged_work.changed.watch(lambda _: self.new_work_event.happened())
        self.node.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
121     
    def stop(self):
        """Signal background loops (the merged-work pollers) to exit."""
        self.running = False
124     
125     def get_stale_counts(self):
126         '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
127         my_shares = len(self.my_share_hashes)
128         my_doa_shares = len(self.my_doa_share_hashes)
129         delta = self.tracker_view.get_delta_to_last(self.node.best_share_var.value)
130         my_shares_in_chain = delta.my_count + self.removed_unstales_var.value[0]
131         my_doa_shares_in_chain = delta.my_doa_count + self.removed_doa_unstales_var.value
132         orphans_recorded_in_chain = delta.my_orphan_announce_count + self.removed_unstales_var.value[1]
133         doas_recorded_in_chain = delta.my_dead_announce_count + self.removed_unstales_var.value[2]
134         
135         my_shares_not_in_chain = my_shares - my_shares_in_chain
136         my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
137         
138         return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
139     
140     def get_user_details(self, username):
141         contents = re.split('([+/])', username)
142         assert len(contents) % 2 == 1
143         
144         user, contents2 = contents[0], contents[1:]
145         
146         desired_pseudoshare_target = None
147         desired_share_target = None
148         for symbol, parameter in zip(contents2[::2], contents2[1::2]):
149             if symbol == '+':
150                 try:
151                     desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(parameter))
152                 except:
153                     if p2pool.DEBUG:
154                         log.err()
155             elif symbol == '/':
156                 try:
157                     desired_share_target = bitcoin_data.difficulty_to_target(float(parameter))
158                 except:
159                     if p2pool.DEBUG:
160                         log.err()
161         
162         if random.uniform(0, 100) < self.worker_fee:
163             pubkey_hash = self.my_pubkey_hash
164         else:
165             try:
166                 pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
167             except: # XXX blah
168                 pubkey_hash = self.my_pubkey_hash
169         
170         return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
171     
172     def preprocess_request(self, user):
173         if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST:
174             raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
175         if time.time() > self.current_work.value['last_update'] + 60:
176             raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
177         user, pubkey_hash, desired_share_target, desired_pseudoshare_target = self.get_user_details(user)
178         return pubkey_hash, desired_share_target, desired_pseudoshare_target
179     
180     def _estimate_local_hash_rate(self):
181         if len(self.recent_shares_ts_work) == 50:
182             hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
183             if hash_rate:
184                 return hash_rate
185         return None
186     
187     def get_local_rates(self):
188         miner_hash_rates = {}
189         miner_dead_hash_rates = {}
190         datums, dt = self.local_rate_monitor.get_datums_in_last()
191         for datum in datums:
192             miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
193             if datum['dead']:
194                 miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
195         return miner_hash_rates, miner_dead_hash_rates
196     
197     def get_local_addr_rates(self):
198         addr_hash_rates = {}
199         datums, dt = self.local_addr_rate_monitor.get_datums_in_last()
200         for datum in datums:
201             addr_hash_rates[datum['pubkey_hash']] = addr_hash_rates.get(datum['pubkey_hash'], 0) + datum['work']/dt
202         return addr_hash_rates
203     
    def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
        """Build a getwork job for one miner request.

        pubkey_hash -- payout address for any share the miner finds
        desired_share_target / desired_pseudoshare_target -- optional
            difficulty overrides parsed from the username (None = automatic)

        Returns (ba, got_response): ba is the dict of block-assembly fields
        handed to the miner; got_response(header, user, coinbase_nonce) is
        the closure that validates and routes whatever the miner submits.
        """
        if self.node.best_share_var.value is None and self.node.net.PERSIST:
            raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
        
        # Merged mining: embed the aux-chain merkle root in the coinbase, and
        # remember (aux_work, index, hashes) tuples for submission later.
        if self.merged_work.value:
            tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
            mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
            mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                size=size,
                nonce=0,
            ))
            mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in self.merged_work.value.iteritems()]
        else:
            mm_data = ''
            mm_later = []
        
        tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
        tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
        txn_timestamp = self.current_work.value['txn_timestamp']
        
        # Pick the share type: follow the previous share's type, upgrading to
        # its SUCCESSOR only once enough recent shares vote for the new
        # version (see the 95% check below).
        previous_share = self.node.tracker.items[self.node.best_share_var.value] if self.node.best_share_var.value is not None else None
        if previous_share is None:
            share_type = p2pool_data.Share
        else:
            previous_share_type = type(previous_share)
            
            if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH:
                share_type = previous_share_type
            else:
                successor_type = previous_share_type.SUCCESSOR
                
                counts = p2pool_data.get_desired_version_counts(self.node.tracker,
                    self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10)
                upgraded = counts.get(successor_type.VERSION, 0)/sum(counts.itervalues())
                # NOTE(review): this warning fires at 65% upgraded but the
                # message claims the threshold is 95% (the actual switchover
                # threshold below) — confirm intent.
                if upgraded > .65:
                    print 'Switchover imminent. Upgraded: %.3f%% Threshold: %.3f%%' % (upgraded*100, 95)
                print 
                # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
                if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100:
                    share_type = successor_type
                else:
                    share_type = previous_share_type
        
        # Automatic share difficulty when the user gave no "/diff" override.
        if desired_share_target is None:
            desired_share_target = 2**256-1
            local_hash_rate = self._estimate_local_hash_rate()
            if local_hash_rate is not None:
                desired_share_target = min(desired_share_target,
                    bitcoin_data.average_attempts_to_target(local_hash_rate * self.node.net.SHARE_PERIOD / 0.0167)) # limit to 1.67% of pool shares by modulating share difficulty
            
            # Raise difficulty further if our expected payout would fall
            # below the dust threshold.
            local_addr_rates = self.get_local_addr_rates()
            lookbehind = 3600//self.node.net.SHARE_PERIOD
            block_subsidy = self.node.bitcoind_work.value['subsidy']
            if previous_share is not None and self.node.tracker.get_height(previous_share.hash) > lookbehind:
                expected_payout_per_block = local_addr_rates.get(pubkey_hash, 0)/p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, lookbehind) \
                    * block_subsidy*(1-self.donation_percentage/100) # XXX doesn't use global stale rate to compute pool hash
                if expected_payout_per_block < self.node.net.PARENT.DUST_THRESHOLD:
                    desired_share_target = min(desired_share_target,
                        bitcoin_data.average_attempts_to_target((bitcoin_data.target_to_average_attempts(self.node.bitcoind_work.value['bits'].target)*self.node.net.SPREAD)*self.node.net.PARENT.DUST_THRESHOLD/block_subsidy)
                    )
        
        if True:
            desired_timestamp = int(time.time() + 0.5)
            # Build the generation (coinbase) transaction and share metadata.
            # get_share() later reconstructs the full share from the solved
            # header plus the miner-chosen coinbase nonce.
            share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
                tracker=self.node.tracker,
                share_data=dict(
                    previous_share_hash=self.node.best_share_var.value,
                    coinbase=(script.create_push_script([
                        self.current_work.value['height'],
                        ] + ([mm_data] if mm_data else []) + [
                    ]) + self.current_work.value['coinbaseflags'])[:100],
                    nonce=random.randrange(2**32),
                    pubkey_hash=pubkey_hash,
                    subsidy=self.current_work.value['subsidy'],
                    donation=math.perfect_round(65535*self.donation_percentage/100),
                    stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                        'orphan' if orphans > orphans_recorded_in_chain else
                        'doa' if doas > doas_recorded_in_chain else
                        None
                    )(*self.get_stale_counts()),
                    desired_version=(share_type.SUCCESSOR if share_type.SUCCESSOR is not None else share_type).VOTING_VERSION,
                ),
                block_target=self.current_work.value['bits'].target,
                # Keep the share timestamp strictly ahead of the newest
                # transaction timestamp — presumably a timestamped-transaction
                # coin rule; confirm against the share validation code.
                desired_timestamp=desired_timestamp if txn_timestamp < desired_timestamp else txn_timestamp + 1,
                desired_target=desired_share_target,
                ref_merkle_link=dict(branch=[], index=0),
                desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']),
                net=self.node.net,
                known_txs=tx_map,
                base_subsidy=self.node.pow_subsidy,
                #base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['height']),
            )
        
        packed_gentx = bitcoin_data.tx_type.pack(gentx)
        other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
        
        # Resolve 'p2pool'-relative aux targets now that the share target is known.
        mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
        
        # Pseudoshare (getwork response) difficulty: automatic unless the
        # user gave a "+diff" override.
        if desired_pseudoshare_target is None:
            target = 2**256-1
            local_hash_rate = self._estimate_local_hash_rate()
            if local_hash_rate is not None:
                target = min(target,
                    bitcoin_data.average_attempts_to_target(local_hash_rate * 1)) # limit to 1 share response every second by modulating pseudoshare difficulty
        else:
            target = desired_pseudoshare_target
        # Never ask for pseudoshares easier than what the share or any aux
        # chain requires, and clamp to the network's sane range.
        target = max(target, share_info['bits'].target)
        for aux_work, index, hashes in mm_later:
            target = max(target, aux_work['target'])
        target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)
        
        getwork_time = time.time()
        lp_count = self.new_work_event.times  # used to mark late submissions DOA
        merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0)
        
        print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
            bitcoin_data.target_to_difficulty(target),
            bitcoin_data.target_to_difficulty(share_info['bits'].target),
            self.current_work.value['subsidy']*1e-6, self.node.net.PARENT.SYMBOL,
            len(self.current_work.value['transactions']),
        )
        
        # Fields handed to the miner. coinb1/coinb2 bracket the
        # COINBASE_NONCE_LENGTH-byte miner nonce (the final 4 bytes are the
        # transaction's locktime). The +100 timestamp offset is the header
        # timestamp offset this revision introduces — rationale not visible
        # here; confirm against the share/block timestamp rules.
        ba = dict(
            version=min(self.current_work.value['version'], 2),
            previous_block=self.current_work.value['previous_block'],
            merkle_link=merkle_link,
            coinb1=packed_gentx[:-self.COINBASE_NONCE_LENGTH-4],
            coinb2=packed_gentx[-4:],
            timestamp=self.current_work.value['time'] + 100,
            bits=self.current_work.value['bits'],
            share_target=target,
        )
        
        received_header_hashes = set()
        
        def got_response(header, user, coinbase_nonce):
            """Handle one solved header from a miner; returns True if on time."""
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            # Re-splice the miner's nonce into the coinbase; skip the
            # unpack round-trip when the nonce is all zeroes.
            new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx
            new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx
            
            # Proof-of-work is scrypt; the same value doubles as the header
            # hash throughout this handler.
            header_hash = pow_hash = bitcoin_data.scrypt(bitcoin_data.block_header_type.pack(header))
            try:
                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions, signature=''), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                        print
            except:
                log.err(None, 'Error while processing potential block:')
            
            user, _, _, _ = self.get_user_details(user)
            assert header['previous_block'] == ba['previous_block']
            assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link)
            assert header['bits'] == ba['bits']
            
            # On time means no long-poll has fired since this job was issued.
            on_time = self.new_work_event.times == lp_count
            
            # Submit to each merged chain whose target this PoW satisfies.
            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                            pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                            bitcoin_data.aux_pow_type.pack(dict(
                                merkle_tx=dict(
                                    tx=new_gentx,
                                    block_hash=header_hash,
                                    merkle_link=merkle_link,
                                ),
                                merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                parent_block_header=header,
                            )).encode('hex'),
                        )
                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work['target']):
                                print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (result,)
                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')
            
            # A real p2pool share: record it, add to the tracker, broadcast.
            if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(8*self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)
                
                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)
                
                self.node.tracker.add(share)
                self.node.set_best_share()
                
                try:
                    if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')
                
                self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash)
            
            # Pseudoshare accounting: reject over-target and duplicate
            # submissions, otherwise record the work for rate estimation.
            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (user,)
                print '    Hash:   %56x' % (pow_hash,)
                print '    Target: %56x' % (target,)
            elif header_hash in received_header_hashes:
                print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,)
            else:
                received_header_hashes.add(header_hash)
                
                self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
                self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user, share_target=share_info['bits'].target))
                self.local_addr_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash))
            
            return on_time
        
        return ba, got_response