removed unused Share.stored attribute
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
28
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind, ht, net):
    """Fetch a block template from bitcoind via the getmemorypool RPC.

    Returns (via defer.returnValue) a dict with the fields the rest of
    p2pool expects: version, previous_block_hash, transactions, subsidy,
    time and target. Retries up to 3 times on failure.
    """
    template = yield bitcoind.rpc_getmemorypool()

    bits = template['bits']
    if isinstance(bits, (str, unicode)):
        # 'bits' arrived as a hex string; bytes are little-endian on the
        # wire, so reverse before unpacking into a FloatingInteger.
        target = bitcoin.data.FloatingIntegerType().unpack(bits.decode('hex')[::-1])
    else:
        # already a plain integer compact representation
        target = bitcoin.data.FloatingInteger(bits)

    defer.returnValue(dict(
        version=template['version'],
        previous_block_hash=int(template['previousblockhash'], 16),
        transactions=[bitcoin.data.tx_type.unpack(raw_tx.decode('hex')) for raw_tx in template['transactions']],
        subsidy=template['coinbasevalue'],
        time=template['time'],
        target=target,
    ))
41
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over bitcoin-p2p (checkorder) for a payout script.

    Returns the script on 'success', None if the request was 'denied',
    and raises ValueError on any other reply.
    """
    protocol = yield factory.getProtocol()
    response = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)

    reply = response['reply']
    if reply == 'success':
        defer.returnValue(response['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (response,))
52
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build a payout script from the address of bitcoind's 'p2pool' account.

    Fallback path used when the checkorder-based IP transaction is denied.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
57
58 @defer.inlineCallbacks
59 def main(args):
60     try:
61         print 'p2pool (version %s)' % (p2pool_init.__version__,)
62         print
63         try:
64             from . import draw
65         except ImportError:
66             draw = None
67             print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
68             print
69         
70         # connect to bitcoind over JSON-RPC and do initial getwork
71         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
72         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
73         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
74         good = yield deferral.retry('Error while checking bitcoind identity:', 1)(args.net.BITCOIN_RPC_CHECK)(bitcoind)
75         if not good:
76             print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
77             return
78         temp_work = yield deferral.retry('Error while testing getwork:', 1)(defer.inlineCallbacks(lambda: defer.returnValue(bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork())))))()
79         print '    ...success!'
80         print '    Current block hash: %x' % (temp_work.previous_block,)
81         print
82         
83         # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
84         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
85         factory = bitcoin.p2p.ClientFactory(args.net)
86         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
87         my_script = yield get_payout_script(factory)
88         if args.pubkey_hash is None:
89             if my_script is None:
90                 print '    IP transaction denied ... falling back to sending to address.'
91                 my_script = yield get_payout_script2(bitcoind, args.net)
92         else:
93             my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
94         print '    ...success!'
95         print '    Payout script:', bitcoin.data.script2_to_human(my_script, args.net)
96         print
97         
98         print 'Loading cached block headers...'
99         ht = bitcoin.p2p.HeightTracker(factory, args.net.NAME + '_headers.dat')
100         print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
101         print
102         
103         tracker = p2pool.OkayTracker(args.net)
104         ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.NAME + '_shares.'), args.net)
105         known_verified = set()
106         print "Loading shares..."
107         for i, (mode, contents) in enumerate(ss.get_shares()):
108             if mode == 'share':
109                 if contents.hash in tracker.shares:
110                     continue
111                 contents.shared = True
112                 contents.time_seen = 0
113                 tracker.add(contents)
114                 if len(tracker.shares) % 1000 == 0 and tracker.shares:
115                     print "    %i" % (len(tracker.shares),)
116             elif mode == 'verified_hash':
117                 known_verified.add(contents)
118             else:
119                 raise AssertionError()
120         print "    ...inserting %i verified shares..." % (len(known_verified),)
121         for h in known_verified:
122             if h not in tracker.shares:
123                 ss.forget_verified_share(h)
124                 continue
125             tracker.verified.add(tracker.shares[h])
126         print "    ...done loading %i shares!" % (len(tracker.shares),)
127         print
128         tracker.added.watch(lambda share: ss.add_share(share))
129         tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
130         tracker.removed.watch(lambda share: ss.forget_share(share.hash))
131         tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
132         
133         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
134         
135         # information affecting work that should trigger a long-polling update
136         current_work = variable.Variable(None)
137         # information affecting work that should not trigger a long-polling update
138         current_work2 = variable.Variable(None)
139         
140         work_updated = variable.Event()
141         
142         requested = expiring_dict.ExpiringDict(300)
143         
144         @defer.inlineCallbacks
145         def set_real_work1():
146             work = yield getwork(bitcoind, ht, args.net)
147             changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
148             current_work.set(dict(
149                 version=work['version'],
150                 previous_block=work['previous_block_hash'],
151                 target=work['target'],
152                 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
153                 aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
154             ))
155             current_work2.set(dict(
156                 time=work['time'],
157                 transactions=work['transactions'],
158                 subsidy=work['subsidy'],
159                 clock_offset=time.time() - work['time'],
160                 last_update=time.time(),
161             ))
162             if changed:
163                 set_real_work2()
164         
165         def set_real_work2():
166             best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
167             
168             t = dict(current_work.value)
169             t['best_share_hash'] = best
170             current_work.set(t)
171             
172             t = time.time()
173             for peer2, share_hash in desired:
174                 if share_hash not in tracker.tails: # was received in the time tracker.think was running
175                     continue
176                 last_request_time, count = requested.get(share_hash, (None, 0))
177                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
178                     continue
179                 potential_peers = set()
180                 for head in tracker.tails[share_hash]:
181                     potential_peers.update(peer_heads.get(head, set()))
182                 potential_peers = [peer for peer in potential_peers if peer.connected2]
183                 if count == 0 and peer2 is not None and peer2.connected2:
184                     peer = peer2
185                 else:
186                     peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
187                     if peer is None:
188                         continue
189                 
190                 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
191                 peer.send_getshares(
192                     hashes=[share_hash],
193                     parents=2000,
194                     stops=list(set(tracker.heads) | set(
195                         tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
196                     ))[:100],
197                 )
198                 requested[share_hash] = t, count + 1
199         
200         print 'Initializing work...'
201         yield set_real_work1()
202         set_real_work2()
203         print '    ...success!'
204         print
205         
206         @defer.inlineCallbacks
207         def set_merged_work():
208             if not args.merged_url:
209                 return
210             while True:
211                 merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
212                 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
213                 x = dict(current_work.value)
214                 x['aux_work'] = dict(
215                     hash=int(auxblock['hash'], 16),
216                     target=bitcoin.data.HashType().unpack(auxblock['target'].decode('hex')),
217                     chain_id=auxblock['chainid'],
218                 )
219                 #print x['aux_work']
220                 current_work.set(x)
221                 yield deferral.sleep(1)
222         set_merged_work()
223         
224         start_time = time.time() - current_work2.value['clock_offset']
225         
226         # setup p2p logic and join p2pool network
227         
228         def share_share(share, ignore_peer=None):
229             for peer in p2p_node.peers.itervalues():
230                 if peer is ignore_peer:
231                     continue
232                 #if p2pool_init.DEBUG:
233                 #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
234                 peer.sendShares([share])
235             share.flag_shared()
236         
237         def p2p_shares(shares, peer=None):
238             if len(shares) > 5:
239                 print 'Processing %i shares...' % (len(shares),)
240             
241             new_count = 0
242             for share in shares:
243                 if share.hash in tracker.shares:
244                     #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
245                     continue
246                 
247                 new_count += 1
248                 
249                 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
250                 
251                 tracker.add(share)
252             
253             if shares and peer is not None:
254                 peer_heads.setdefault(shares[0].hash, set()).add(peer)
255             
256             if new_count:
257                 set_real_work2()
258             
259             if len(shares) > 5:
260                 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*args.net.CHAIN_LENGTH)
261         
262         @tracker.verified.added.watch
263         def _(share):
264             if share.pow_hash <= share.header['target']:
265                 if factory.conn.value is not None:
266                     factory.conn.value.send_block(block=share.as_block(tracker, args.net))
267                 else:
268                     print 'No bitcoind connection! Erp!'
269                 print
270                 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.header_hash,)
271                 print
272         
273         def p2p_share_hashes(share_hashes, peer):
274             t = time.time()
275             get_hashes = []
276             for share_hash in share_hashes:
277                 if share_hash in tracker.shares:
278                     continue
279                 last_request_time, count = requested.get(share_hash, (None, 0))
280                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
281                     continue
282                 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
283                 get_hashes.append(share_hash)
284                 requested[share_hash] = t, count + 1
285             
286             if share_hashes and peer is not None:
287                 peer_heads.setdefault(share_hashes[0], set()).add(peer)
288             if get_hashes:
289                 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
290         
291         def p2p_get_shares(share_hashes, parents, stops, peer):
292             parents = min(parents, 1000//len(share_hashes))
293             stops = set(stops)
294             shares = []
295             for share_hash in share_hashes:
296                 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
297                     if share.hash in stops:
298                         break
299                     shares.append(share)
300             print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
301             peer.sendShares(shares, full=True)
302         
303         print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
304         
305         def parse(x):
306             if ':' in x:
307                 ip, port = x.split(':')
308                 return ip, int(port)
309             else:
310                 return x, args.net.P2P_PORT
311         
312         nodes = set([
313             ('72.14.191.28', args.net.P2P_PORT),
314             ('62.204.197.159', args.net.P2P_PORT),
315             ('142.58.248.28', args.net.P2P_PORT),
316             ('94.23.34.145', args.net.P2P_PORT),
317         ])
318         for host in [
319             'p2pool.forre.st',
320             'dabuttonfactory.com',
321         ]:
322             try:
323                 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
324             except:
325                 log.err(None, 'Error resolving bootstrap node IP:')
326
327         if args.net_name == 'litecoin':
328             nodes.add(((yield reactor.resolve('liteco.in')), args.net.P2P_PORT))
329         
330         p2p_node = p2p.Node(
331             current_work=current_work,
332             port=args.p2pool_port,
333             net=args.net,
334             addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.NAME),
335             mode=0 if args.low_bandwidth else 1,
336             preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
337         )
338         p2p_node.handle_shares = p2p_shares
339         p2p_node.handle_share_hashes = p2p_share_hashes
340         p2p_node.handle_get_shares = p2p_get_shares
341         
342         p2p_node.start()
343         
344         # send share when the chain changes to their chain
345         def work_changed(new_work):
346             #print 'Work changed:', new_work
347             for share in tracker.get_chain_known(new_work['best_share_hash']):
348                 if share.shared:
349                     break
350                 share_share(share, share.peer)
351         current_work.changed.watch(work_changed)
352         
353         print '    ...success!'
354         print
355         
356         @defer.inlineCallbacks
357         def upnp_thread():
358             while True:
359                 try:
360                     is_lan, lan_ip = yield ipdiscover.get_local_ip()
361                     if is_lan:
362                         pm = yield portmapper.get_port_mapper()
363                         yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
364                 except defer.TimeoutError:
365                     pass
366                 except:
367                     if p2pool_init.DEBUG:
368                         log.err(None, "UPnP error:")
369                 yield deferral.sleep(random.expovariate(1/120))
370         
371         if args.upnp:
372             upnp_thread()
373         
374         # start listening for workers with a JSON-RPC server
375         
376         print 'Listening for workers on port %i...' % (args.worker_port,)
377         
378         # setup worker logic
379         
380         merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
381         run_identifier = struct.pack('<I', random.randrange(2**32))
382         
383         share_counter = skiplists.CountsSkipList(tracker, run_identifier)
384         removed_unstales = set()
385         def get_share_counts(doa=False):
386             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
387             matching_in_chain = share_counter(current_work.value['best_share_hash'], max(0, height - 1)) | removed_unstales
388             shares_in_chain = my_shares & matching_in_chain
389             stale_shares = my_shares - matching_in_chain
390             if doa:
391                 stale_doa_shares = stale_shares & doa_shares
392                 stale_not_doa_shares = stale_shares - stale_doa_shares
393                 return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
394             return len(shares_in_chain) + len(stale_shares), len(stale_shares)
395         @tracker.verified.removed.watch
396         def _(share):
397             if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
398                 removed_unstales.add(share.hash)
399
400         def compute(state, payout_script):
401             if payout_script is None or random.uniform(0, 100) < args.worker_fee:
402                 payout_script = my_script
403             if state['best_share_hash'] is None and args.net.PERSIST:
404                 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
405             if len(p2p_node.peers) == 0 and args.net.PERSIST:
406                 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
407             if time.time() > current_work2.value['last_update'] + 60:
408                 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
409             
410             if state['aux_work'] is not None:
411                 aux_str = '\xfa\xbemm' + bitcoin.data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
412             else:
413                 aux_str = ''
414             
415             # XXX assuming generate_tx is smallish here..
416             def get_stale_frac():
417                 shares, stale_shares = get_share_counts()
418                 if shares == 0:
419                     return ""
420                 frac = stale_shares/shares
421                 return 2*struct.pack('<H', int(65535*frac + .5))
422             subsidy = current_work2.value['subsidy']
423             
424             
425             if int(time.time() - current_work2.value['clock_offset']) >= p2pool.TRANSITION_TIME:
426                 timestamp = current_work2.value['time']
427                 is_new = True
428                 previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
429                 new_share_info, generate_tx = p2pool.new_generate_transaction(
430                     tracker=tracker,
431                     new_share_data=dict(
432                         previous_share_hash=state['best_share_hash'],
433                         coinbase=aux_str,
434                         nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
435                         new_script=payout_script,
436                         subsidy=subsidy,
437                         donation=math.perfect_round(65535*args.donation_percentage/100),
438                         timestamp=math.clip(int(time.time() - current_work2.value['clock_offset']),
439                             (previous_share.timestamp - 60, previous_share.timestamp + 60),
440                         ) if previous_share is not None else int(time.time() - current_work2.value['clock_offset']),
441                         stale_frac=(lambda shares, stales:
442                             255 if shares == 0 else math.perfect_round(254*stales/shares)
443                         )(*get_share_counts()),
444                     ),
445                     block_target=state['target'],
446                     net=args.net,
447                 )
448             else:
449                 timestamp = int(time.time() - current_work2.value['clock_offset'])
450                 if state['best_share_hash'] is not None:
451                     timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
452                     if timestamp2 > timestamp:
453                         print 'Toff', timestamp2 - timestamp
454                         timestamp = timestamp2
455                 is_new = False
456                 share_info, generate_tx = p2pool.generate_transaction(
457                     tracker=tracker,
458                     previous_share_hash=state['best_share_hash'],
459                     new_script=payout_script,
460                     subsidy=subsidy,
461                     nonce=run_identifier + struct.pack('<H', random.randrange(2**16)) + aux_str + get_stale_frac(),
462                     block_target=state['target'],
463                     net=args.net,
464                 )
465             
466             print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin.data.target_to_difficulty((new_share_info if is_new else share_info['share_data'])['target']), (generate_tx['tx_outs'][-1]['value']-subsidy//200)*1e-8, args.net.BITCOIN_SYMBOL, subsidy*1e-8, args.net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
467             #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
468             #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
469             transactions = [generate_tx] + list(current_work2.value['transactions'])
470             merkle_root = bitcoin.data.merkle_hash(transactions)
471             merkle_root_to_transactions[merkle_root] = is_new, new_share_info if is_new else share_info, transactions
472             
473             target2 = (new_share_info if is_new else share_info['share_data'])['target']
474             times[merkle_root] = time.time()
475             #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
476             return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
477         
478         my_shares = set()
479         doa_shares = set()
480         times = {}
481         
482         def got_response(data, user):
483             try:
484                 # match up with transactions
485                 header = bitcoin.getwork.decode_data(data)
486                 xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
487                 if xxx is None:
488                     print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
489                     return False
490                 is_new, share_info, transactions = xxx
491                 new_share_info = share_info
492                 
493                 hash_ = bitcoin.data.block_header_type.hash256(header)
494                 
495                 pow_hash = args.net.BITCOIN_POW_FUNC(header)
496                 
497                 if pow_hash <= header['target'] or p2pool_init.DEBUG:
498                     if factory.conn.value is not None:
499                         factory.conn.value.send_block(block=dict(header=header, txs=transactions))
500                     else:
501                         print 'No bitcoind connection! Erp!'
502                     if pow_hash <= header['target']:
503                         print
504                         print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
505                         print
506                 
507                 if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
508                     try:
509                         aux_pow = dict(
510                             merkle_tx=dict(
511                                 tx=transactions[0],
512                                 block_hash=hash_,
513                                 merkle_branch=[x['hash'] for x in p2pool.calculate_merkle_branch(transactions, 0)],
514                                 index=0,
515                             ),
516                             merkle_branch=[],
517                             index=0,
518                             parent_block_header=header,
519                         )
520                         
521                         a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin.data.aux_pow_type.pack(aux_pow).encode('hex')
522                         #print a, b
523                         merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
524                         def _(res):
525                             print "MERGED RESULT:", res
526                         merged.rpc_getauxblock(a, b).addBoth(_)
527                     except:
528                         log.err(None, 'Error while processing merged mining POW:')
529                 
530                 target = (new_share_info if is_new else share_info['share_data'])['target']
531                 if pow_hash > target:
532                     print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
533                     return False
534                 if is_new:
535                     share = p2pool.NewShare(args.net, header, new_share_info, other_txs=transactions[1:])
536                 else:
537                     share = p2pool.Share(args.net, header, share_info, other_txs=transactions[1:])
538                 my_shares.add(share.hash)
539                 if share.previous_hash != current_work.value['best_share_hash']:
540                     doa_shares.add(share.hash)
541                 print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
542                 good = share.previous_hash == current_work.value['best_share_hash']
543                 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
544                 p2p_shares([share])
545                 # eg. good = share.hash == current_work.value['best_share_hash'] here
546                 return good
547             except:
548                 log.err(None, 'Error processing data received from worker:')
549                 return False
550         
551         web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
552         
553         def get_rate():
554             if current_work.value['best_share_hash'] is not None:
555                 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
556                 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
557                 fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
558                 return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
559             return json.dumps(None)
560         
561         def get_users():
562             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
563             weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
564             res = {}
565             for script in sorted(weights, key=lambda s: weights[s]):
566                 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
567             return json.dumps(res)
568         
569         class WebInterface(resource.Resource):
570             def __init__(self, func, mime_type):
571                 self.func, self.mime_type = func, mime_type
572             
573             def render_GET(self, request):
574                 request.setHeader('Content-Type', self.mime_type)
575                 return self.func()
576         
577         web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
578         web_root.putChild('users', WebInterface(get_users, 'application/json'))
579         web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
580         if draw is not None:
581             web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
582         
583         reactor.listenTCP(args.worker_port, server.Site(web_root))
584         
585         print '    ...success!'
586         print
587         
588         # done!
589         
590         # do new getwork when a block is heard on the p2p interface
591         
592         def new_block(block_hash):
593             work_updated.happened()
594         factory.new_block.watch(new_block)
595         
596         print 'Started successfully!'
597         print
598         
599         ht.updated.watch(set_real_work2)
600         
601         @defer.inlineCallbacks
602         def work1_thread():
603             while True:
604                 flag = work_updated.get_deferred()
605                 try:
606                     yield set_real_work1()
607                 except:
608                     log.err()
609                 yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
610         
611         @defer.inlineCallbacks
612         def work2_thread():
613             while True:
614                 try:
615                     set_real_work2()
616                 except:
617                     log.err()
618                 yield deferral.sleep(random.expovariate(1/20))
619         
        # start both polling loops (each is an inlineCallbacks generator that
        # runs forever; the returned Deferreds are intentionally discarded)
        work1_thread()
        work2_thread()
        
        
        # watchdog: re-arm a 30-second alarm once per second; the handler only
        # ever fires (dumping a stack trace) if the reactor stalls for >30 s
        if hasattr(signal, 'SIGALRM'):
            def watchdog_handler(signum, frame):
                print 'Watchdog timer went off at:'
                traceback.print_stack()
            
            signal.signal(signal.SIGALRM, watchdog_handler)
            task.LoopingCall(signal.alarm, 30).start(1)
631         
632         
633         def read_stale_frac(share):
634             if isinstance(share, p2pool.NewShare):
635                 return share.share_data['stale_frac']/254 if share.share_data['stale_frac'] != 255 else None
636             if len(share.nonce) < 4:
637                 return None
638             a, b = struct.unpack("<HH", share.nonce[-4:])
639             if a == 0 or a != b:
640                 return None
641             return a/65535
642
        # periodic status loop: every 3 s, warn if bitcoind looks dead and
        # print a pool summary line whenever it changes
        pool_str = None;
        while True:
            yield deferral.sleep(3)
            try:
                if time.time() > current_work2.value['last_update'] + 60:
                    print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        # pool hashrate estimate over up to the last 720 shares
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
                        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                        shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
                        stale_shares = stale_doa_shares + stale_not_doa_shares
                        # stale fractions advertised by the last 120 known shares
                        # NOTE(review): read_stale_frac is evaluated twice per share here
                        fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
                        # NOTE(review): 'str' shadows the builtin str in this scope
                        str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
                            math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
                            height,
                            len(tracker.verified.shares),
                            len(tracker.shares),
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
                            shares,
                            stale_not_doa_shares,
                            stale_doa_shares,
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                        # only print when the summary actually changed
                        if (str != pool_str):
                            print str;
                            pool_str = str;
                        if fracs:
                            med = math.median(fracs)
                            print 'Median stale proportion:', med
                            if shares:
                                print '    Own:', stale_shares/shares
                                if med < .99:
                                    print '    Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
            
            
            except:
                log.err()
683     except:
684         log.err(None, 'Fatal error:')
685     finally:
686         reactor.stop()
687
688 def run():
689     class FixedArgumentParser(argparse.ArgumentParser):
690         def _read_args_from_files(self, arg_strings):
691             # expand arguments referencing files
692             new_arg_strings = []
693             for arg_string in arg_strings:
694                 
695                 # for regular arguments, just add them back into the list
696                 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
697                     new_arg_strings.append(arg_string)
698                 
699                 # replace arguments referencing files with the file content
700                 else:
701                     try:
702                         args_file = open(arg_string[1:])
703                         try:
704                             arg_strings = []
705                             for arg_line in args_file.read().splitlines():
706                                 for arg in self.convert_arg_line_to_args(arg_line):
707                                     arg_strings.append(arg)
708                             arg_strings = self._read_args_from_files(arg_strings)
709                             new_arg_strings.extend(arg_strings)
710                         finally:
711                             args_file.close()
712                     except IOError:
713                         err = sys.exc_info()[1]
714                         self.error(str(err))
715             
716             # return the modified argument list
717             return new_arg_strings
718         
719         def convert_arg_line_to_args(self, arg_line):
720             return [arg for arg in arg_line.split() if arg.strip()]
721     
    # command-line interface; @file arguments are expanded by FixedArgumentParser
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(x for x in p2pool.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='percentage amount to donate to author of p2pool. Default: 0.5',
        type=float, action='store', default=0.5, dest='donation_percentage')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    # NOTE(review): these two are added to 'parser' rather than 'p2pool_group',
    # so they appear outside the group in --help output (cosmetic only)
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: bitcoin: 9332 namecoin: 9331 ixcoin: 9330 i0coin: 9329 solidcoin: 9328 litecoin: 9327, +10000 for testnets)',
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:9332/fee . default: 0''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332 ixcoin: 8338 i0coin: 7332 litecoin: 9332)',
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 namecoin: 8334 ixcoin: 8337 i0coin: 7333 solidcoin: 7555 litecoin: 9333, +10000 for testnets)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # positional arguments: RPC username (optional) and password (required)
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
        help='bitcoind RPC interface username (default: empty)',
        type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
    
    # default log file: <network>[_testnet].log next to the launcher script
    if args.logfile is None:
        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), args.net_name + ('_testnet' if args.testnet else '') + '.log')
797     class LogFile(object):
798         def __init__(self, filename):
799             self.filename = filename
800             self.inner_file = None
801             self.reopen()
802         def reopen(self):
803             if self.inner_file is not None:
804                 self.inner_file.close()
805             open(self.filename, 'a').close()
806             f = open(self.filename, 'rb')
807             f.seek(0, os.SEEK_END)
808             length = f.tell()
809             if length > 100*1000*1000:
810                 f.seek(-1000*1000, os.SEEK_END)
811                 while True:
812                     if f.read(1) in ('', '\n'):
813                         break
814                 data = f.read()
815                 f.close()
816                 f = open(self.filename, 'wb')
817                 f.write(data)
818             f.close()
819             self.inner_file = open(self.filename, 'a')
820         def write(self, data):
821             self.inner_file.write(data)
822         def flush(self):
823             self.inner_file.flush()
824     class TeePipe(object):
825         def __init__(self, outputs):
826             self.outputs = outputs
827         def write(self, data):
828             for output in self.outputs:
829                 output.write(data)
830         def flush(self):
831             for output in self.outputs:
832                 output.flush()
833     class TimestampingPipe(object):
834         def __init__(self, inner_file):
835             self.inner_file = inner_file
836             self.buf = ''
837             self.softspace = 0
838         def write(self, data):
839             buf = self.buf + data
840             lines = buf.split('\n')
841             for line in lines[:-1]:
842                 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
843                 self.inner_file.flush()
844             self.buf = lines[-1]
845         def flush(self):
846             pass
    # tee stdout/stderr (and twisted's default log output) through the
    # timestamping pipe into both the console and the log file
    logfile = LogFile(args.logfile)
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            # allows external log rotation: rotate, then signal us to reopen
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    # reopen every 5 s so the >100 MB truncation in LogFile.reopen gets a chance to run
    task.LoopingCall(logfile.reopen).start(5)
    
    args.net = p2pool.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    # fill in per-network port defaults for anything not given on the command line
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = args.net.BITCOIN_RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = args.net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    # XOR: error out when exactly one of the two merged-mining options is given
    if (args.merged_url is None) ^ (args.merged_userpass is None):
        parser.error('must specify --merged-url and --merged-userpass')
    
    reactor.callWhenRunning(main, args)
    reactor.run()