removed transaction ordering because payout script is not in coinbase anymore
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
24 from util import db, expiring_dict, jsonrpc, variable, deferral, math
25 from . import p2p, worker_interface, skiplists
26 import p2pool.data as p2pool
27 import p2pool as p2pool_init
28
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind, ht, net):
    """Fetch a block template from bitcoind via getmemorypool and normalize it.

    Returns (via Deferred) a dict with version, previous_block_hash (as int),
    unpacked transactions, subsidy, time, and the target as a FloatingInteger.
    """
    work = yield bitcoind.rpc_getmemorypool()
    # 'bits' can arrive either as a hex string or as a raw integer;
    # normalize both representations to a FloatingInteger.
    raw_bits = work['bits']
    if isinstance(raw_bits, (str, unicode)):
        target = bitcoin.data.FloatingIntegerType().unpack(raw_bits.decode('hex')[::-1])
    else:
        target = bitcoin.data.FloatingInteger(raw_bits)
    unpacked_txs = [bitcoin.data.tx_type.unpack(raw_tx.decode('hex')) for raw_tx in work['transactions']]
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=unpacked_txs,
        subsidy=work['coinbasevalue'],
        time=work['time'],
        target=target,
    ))
41
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask bitcoind over the p2p checkorder mechanism for a payout script.

    Returns (via Deferred) the script on a 'success' reply, None on 'denied',
    and raises ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    # defer.returnValue raises internally, so each branch terminates here.
    if reply == 'denied':
        defer.returnValue(None)
    if reply != 'success':
        raise ValueError('Unexpected reply: %r' % (res,))
    defer.returnValue(res['script'])
52
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build a payout script from the address of bitcoind's 'p2pool' account.

    Returns (via Deferred) a script2 paying the pubkey hash decoded from the
    address that getaccountaddress('p2pool') reports.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
57
58 @defer.inlineCallbacks
59 def main(args):
60     try:
61         print 'p2pool (version %s)' % (p2pool_init.__version__,)
62         print
63         try:
64             from . import draw
65         except ImportError:
66             draw = None
67             print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
68             print
69         
70         # connect to bitcoind over JSON-RPC and do initial getwork
71         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
72         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
73         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
74         good = yield deferral.retry('Error while checking bitcoind identity:', 1)(args.net.BITCOIN_RPC_CHECK)(bitcoind)
75         if not good:
76             print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
77             return
78         temp_work = yield deferral.retry('Error while testing getwork:', 1)(defer.inlineCallbacks(lambda: defer.returnValue(bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork())))))()
79         print '    ...success!'
80         print '    Current block hash: %x' % (temp_work.previous_block,)
81         print
82         
83         # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
84         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
85         factory = bitcoin.p2p.ClientFactory(args.net)
86         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
87         my_script = yield get_payout_script(factory)
88         if args.pubkey_hash is None:
89             if my_script is None:
90                 print '    IP transaction denied ... falling back to sending to address.'
91                 my_script = yield get_payout_script2(bitcoind, args.net)
92         else:
93             my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
94         print '    ...success!'
95         print '    Payout script:', bitcoin.data.script2_to_human(my_script, args.net)
96         print
97         
98         print 'Loading cached block headers...'
99         ht = bitcoin.p2p.HeightTracker(factory, args.net.NAME + '_headers.dat')
100         print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
101         print
102         
103         tracker = p2pool.OkayTracker(args.net)
104         shared_share_hashes = set()
105         ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.NAME + '_shares.'), args.net)
106         known_verified = set()
107         print "Loading shares..."
108         for i, (mode, contents) in enumerate(ss.get_shares()):
109             if mode == 'share':
110                 if contents.hash in tracker.shares:
111                     continue
112                 shared_share_hashes.add(contents.hash)
113                 contents.time_seen = 0
114                 tracker.add(contents)
115                 if len(tracker.shares) % 1000 == 0 and tracker.shares:
116                     print "    %i" % (len(tracker.shares),)
117             elif mode == 'verified_hash':
118                 known_verified.add(contents)
119             else:
120                 raise AssertionError()
121         print "    ...inserting %i verified shares..." % (len(known_verified),)
122         for h in known_verified:
123             if h not in tracker.shares:
124                 ss.forget_verified_share(h)
125                 continue
126             tracker.verified.add(tracker.shares[h])
127         print "    ...done loading %i shares!" % (len(tracker.shares),)
128         print
129         tracker.added.watch(lambda share: ss.add_share(share))
130         tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
131         tracker.removed.watch(lambda share: ss.forget_share(share.hash))
132         tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
133         tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
134         
135         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
136         
137         # information affecting work that should trigger a long-polling update
138         current_work = variable.Variable(None)
139         # information affecting work that should not trigger a long-polling update
140         current_work2 = variable.Variable(None)
141         
142         work_updated = variable.Event()
143         
144         requested = expiring_dict.ExpiringDict(300)
145         
146         @defer.inlineCallbacks
147         def set_real_work1():
148             work = yield getwork(bitcoind, ht, args.net)
149             changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
150             current_work.set(dict(
151                 version=work['version'],
152                 previous_block=work['previous_block_hash'],
153                 target=work['target'],
154                 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
155                 aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
156             ))
157             current_work2.set(dict(
158                 time=work['time'],
159                 transactions=work['transactions'],
160                 subsidy=work['subsidy'],
161                 clock_offset=time.time() - work['time'],
162                 last_update=time.time(),
163             ))
164             if changed:
165                 set_real_work2()
166         
167         def set_real_work2():
168             best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
169             
170             t = dict(current_work.value)
171             t['best_share_hash'] = best
172             current_work.set(t)
173             
174             t = time.time()
175             for peer2, share_hash in desired:
176                 if share_hash not in tracker.tails: # was received in the time tracker.think was running
177                     continue
178                 last_request_time, count = requested.get(share_hash, (None, 0))
179                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
180                     continue
181                 potential_peers = set()
182                 for head in tracker.tails[share_hash]:
183                     potential_peers.update(peer_heads.get(head, set()))
184                 potential_peers = [peer for peer in potential_peers if peer.connected2]
185                 if count == 0 and peer2 is not None and peer2.connected2:
186                     peer = peer2
187                 else:
188                     peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
189                     if peer is None:
190                         continue
191                 
192                 print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
193                 peer.send_getshares(
194                     hashes=[share_hash],
195                     parents=2000,
196                     stops=list(set(tracker.heads) | set(
197                         tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
198                     ))[:100],
199                 )
200                 requested[share_hash] = t, count + 1
201         
202         print 'Initializing work...'
203         yield set_real_work1()
204         set_real_work2()
205         print '    ...success!'
206         print
207         
208         @defer.inlineCallbacks
209         def set_merged_work():
210             if not args.merged_url:
211                 return
212             while True:
213                 merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
214                 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
215                 x = dict(current_work.value)
216                 x['aux_work'] = dict(
217                     hash=int(auxblock['hash'], 16),
218                     target=bitcoin.data.HashType().unpack(auxblock['target'].decode('hex')),
219                     chain_id=auxblock['chainid'],
220                 )
221                 #print x['aux_work']
222                 current_work.set(x)
223                 yield deferral.sleep(1)
224         set_merged_work()
225         
226         start_time = time.time() - current_work2.value['clock_offset']
227         
228         # setup p2p logic and join p2pool network
229         
230         def share_share(share, ignore_peer=None):
231             for peer in p2p_node.peers.itervalues():
232                 if peer is ignore_peer:
233                     continue
234                 #if p2pool_init.DEBUG:
235                 #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
236                 peer.sendShares([share])
237             shared_share_hashes.add(share.hash)
238         
239         def p2p_shares(shares, peer=None):
240             if len(shares) > 5:
241                 print 'Processing %i shares...' % (len(shares),)
242             
243             new_count = 0
244             for share in shares:
245                 if share.hash in tracker.shares:
246                     #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
247                     continue
248                 
249                 new_count += 1
250                 
251                 #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
252                 
253                 tracker.add(share)
254             
255             if shares and peer is not None:
256                 peer_heads.setdefault(shares[0].hash, set()).add(peer)
257             
258             if new_count:
259                 set_real_work2()
260             
261             if len(shares) > 5:
262                 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*args.net.CHAIN_LENGTH)
263         
264         @tracker.verified.added.watch
265         def _(share):
266             if share.pow_hash <= share.header['target']:
267                 if factory.conn.value is not None:
268                     factory.conn.value.send_block(block=share.as_block(tracker, args.net))
269                 else:
270                     print 'No bitcoind connection! Erp!'
271                 print
272                 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.header_hash,)
273                 print
274         
275         def p2p_share_hashes(share_hashes, peer):
276             t = time.time()
277             get_hashes = []
278             for share_hash in share_hashes:
279                 if share_hash in tracker.shares:
280                     continue
281                 last_request_time, count = requested.get(share_hash, (None, 0))
282                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
283                     continue
284                 print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
285                 get_hashes.append(share_hash)
286                 requested[share_hash] = t, count + 1
287             
288             if share_hashes and peer is not None:
289                 peer_heads.setdefault(share_hashes[0], set()).add(peer)
290             if get_hashes:
291                 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
292         
293         def p2p_get_shares(share_hashes, parents, stops, peer):
294             parents = min(parents, 1000//len(share_hashes))
295             stops = set(stops)
296             shares = []
297             for share_hash in share_hashes:
298                 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
299                     if share.hash in stops:
300                         break
301                     shares.append(share)
302             print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
303             peer.sendShares(shares, full=True)
304         
305         print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
306         
307         def parse(x):
308             if ':' in x:
309                 ip, port = x.split(':')
310                 return ip, int(port)
311             else:
312                 return x, args.net.P2P_PORT
313         
314         nodes = set([
315             ('72.14.191.28', args.net.P2P_PORT),
316             ('62.204.197.159', args.net.P2P_PORT),
317             ('142.58.248.28', args.net.P2P_PORT),
318             ('94.23.34.145', args.net.P2P_PORT),
319         ])
320         for host in [
321             'p2pool.forre.st',
322             'dabuttonfactory.com',
323         ]:
324             try:
325                 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
326             except:
327                 log.err(None, 'Error resolving bootstrap node IP:')
328
329         if args.net_name == 'litecoin':
330             nodes.add(((yield reactor.resolve('liteco.in')), args.net.P2P_PORT))
331         
332         p2p_node = p2p.Node(
333             current_work=current_work,
334             port=args.p2pool_port,
335             net=args.net,
336             addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.NAME),
337             mode=0 if args.low_bandwidth else 1,
338             preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
339         )
340         p2p_node.handle_shares = p2p_shares
341         p2p_node.handle_share_hashes = p2p_share_hashes
342         p2p_node.handle_get_shares = p2p_get_shares
343         
344         p2p_node.start()
345         
346         # send share when the chain changes to their chain
347         def work_changed(new_work):
348             #print 'Work changed:', new_work
349             for share in tracker.get_chain_known(new_work['best_share_hash']):
350                 if share.hash in shared_share_hashes:
351                     break
352                 share_share(share, share.peer)
353         current_work.changed.watch(work_changed)
354         
355         print '    ...success!'
356         print
357         
358         @defer.inlineCallbacks
359         def upnp_thread():
360             while True:
361                 try:
362                     is_lan, lan_ip = yield ipdiscover.get_local_ip()
363                     if is_lan:
364                         pm = yield portmapper.get_port_mapper()
365                         yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
366                 except defer.TimeoutError:
367                     pass
368                 except:
369                     if p2pool_init.DEBUG:
370                         log.err(None, "UPnP error:")
371                 yield deferral.sleep(random.expovariate(1/120))
372         
373         if args.upnp:
374             upnp_thread()
375         
376         # start listening for workers with a JSON-RPC server
377         
378         print 'Listening for workers on port %i...' % (args.worker_port,)
379         
380         # setup worker logic
381         
382         merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
383         run_identifier = struct.pack('<I', random.randrange(2**32))
384         
385         share_counter = skiplists.CountsSkipList(tracker, run_identifier)
386         removed_unstales = set()
387         def get_share_counts(doa=False):
388             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
389             matching_in_chain = share_counter(current_work.value['best_share_hash'], max(0, height - 1)) | removed_unstales
390             shares_in_chain = my_shares & matching_in_chain
391             stale_shares = my_shares - matching_in_chain
392             if doa:
393                 stale_doa_shares = stale_shares & doa_shares
394                 stale_not_doa_shares = stale_shares - stale_doa_shares
395                 return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
396             return len(shares_in_chain) + len(stale_shares), len(stale_shares)
397         @tracker.verified.removed.watch
398         def _(share):
399             if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
400                 removed_unstales.add(share.hash)
401
402         def compute(state, payout_script):
403             if payout_script is None or random.uniform(0, 100) < args.worker_fee:
404                 payout_script = my_script
405             if state['best_share_hash'] is None and args.net.PERSIST:
406                 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
407             if len(p2p_node.peers) == 0 and args.net.PERSIST:
408                 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
409             if time.time() > current_work2.value['last_update'] + 60:
410                 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
411             
412             if state['aux_work'] is not None:
413                 aux_str = '\xfa\xbemm' + bitcoin.data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
414             else:
415                 aux_str = ''
416             
417             # XXX assuming generate_tx is smallish here..
418             def get_stale_frac():
419                 shares, stale_shares = get_share_counts()
420                 if shares == 0:
421                     return ""
422                 frac = stale_shares/shares
423                 return 2*struct.pack('<H', int(65535*frac + .5))
424             subsidy = current_work2.value['subsidy']
425             
426             
427             if int(time.time() - current_work2.value['clock_offset']) >= p2pool.TRANSITION_TIME:
428                 timestamp = current_work2.value['time']
429                 is_new = True
430                 previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
431                 new_share_info, generate_tx = p2pool.new_generate_transaction(
432                     tracker=tracker,
433                     new_share_data=dict(
434                         previous_share_hash=state['best_share_hash'],
435                         coinbase=aux_str,
436                         nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
437                         new_script=payout_script,
438                         subsidy=subsidy,
439                         donation=math.perfect_round(65535*args.donation_percentage/100),
440                         timestamp=math.clip(int(time.time() - current_work2.value['clock_offset']),
441                             (previous_share.timestamp - 60, previous_share.timestamp + 60),
442                         ) if previous_share is not None else int(time.time() - current_work2.value['clock_offset']),
443                         stale_frac=(lambda shares, stales:
444                             255 if shares == 0 else math.perfect_round(254*stales/shares)
445                         )(*get_share_counts()),
446                     ),
447                     block_target=state['target'],
448                     net=args.net,
449                 )
450             else:
451                 timestamp = int(time.time() - current_work2.value['clock_offset'])
452                 if state['best_share_hash'] is not None:
453                     timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
454                     if timestamp2 > timestamp:
455                         print 'Toff', timestamp2 - timestamp
456                         timestamp = timestamp2
457                 is_new = False
458                 share_info, generate_tx = p2pool.generate_transaction(
459                     tracker=tracker,
460                     previous_share_hash=state['best_share_hash'],
461                     new_script=payout_script,
462                     subsidy=subsidy,
463                     nonce=run_identifier + struct.pack('<H', random.randrange(2**16)) + aux_str + get_stale_frac(),
464                     block_target=state['target'],
465                     net=args.net,
466                 )
467             
468             print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin.data.target_to_difficulty((new_share_info if is_new else share_info['share_data'])['target']), (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) -subsidy//200)*1e-8, args.net.BITCOIN_SYMBOL, subsidy*1e-8, args.net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
469             #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
470             #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
471             transactions = [generate_tx] + list(current_work2.value['transactions'])
472             merkle_root = bitcoin.data.merkle_hash(transactions)
473             merkle_root_to_transactions[merkle_root] = is_new, new_share_info if is_new else share_info, transactions
474             
475             target2 = (new_share_info if is_new else share_info['share_data'])['target']
476             times[merkle_root] = time.time()
477             #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
478             return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
479         
480         my_shares = set()
481         doa_shares = set()
482         times = {}
483         
484         def got_response(data, user):
485             try:
486                 # match up with transactions
487                 header = bitcoin.getwork.decode_data(data)
488                 xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
489                 if xxx is None:
490                     print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
491                     return False
492                 is_new, share_info, transactions = xxx
493                 new_share_info = share_info
494                 
495                 hash_ = bitcoin.data.block_header_type.hash256(header)
496                 
497                 pow_hash = args.net.BITCOIN_POW_FUNC(header)
498                 
499                 if pow_hash <= header['target'] or p2pool_init.DEBUG:
500                     if factory.conn.value is not None:
501                         factory.conn.value.send_block(block=dict(header=header, txs=transactions))
502                     else:
503                         print 'No bitcoind connection! Erp!'
504                     if pow_hash <= header['target']:
505                         print
506                         print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
507                         print
508                 
509                 if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
510                     try:
511                         aux_pow = dict(
512                             merkle_tx=dict(
513                                 tx=transactions[0],
514                                 block_hash=hash_,
515                                 merkle_branch=[x['hash'] for x in p2pool.calculate_merkle_branch(transactions, 0)],
516                                 index=0,
517                             ),
518                             merkle_branch=[],
519                             index=0,
520                             parent_block_header=header,
521                         )
522                         
523                         a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin.data.aux_pow_type.pack(aux_pow).encode('hex')
524                         #print a, b
525                         merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
526                         def _(res):
527                             print "MERGED RESULT:", res
528                         merged.rpc_getauxblock(a, b).addBoth(_)
529                     except:
530                         log.err(None, 'Error while processing merged mining POW:')
531                 
532                 target = (new_share_info if is_new else share_info['share_data'])['target']
533                 if pow_hash > target:
534                     print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
535                     return False
536                 if is_new:
537                     share = p2pool.NewShare(args.net, header, new_share_info, other_txs=transactions[1:])
538                 else:
539                     share = p2pool.Share(args.net, header, share_info, other_txs=transactions[1:])
540                 my_shares.add(share.hash)
541                 if share.previous_hash != current_work.value['best_share_hash']:
542                     doa_shares.add(share.hash)
543                 print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
544                 good = share.previous_hash == current_work.value['best_share_hash']
545                 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
546                 p2p_shares([share])
547                 # eg. good = share.hash == current_work.value['best_share_hash'] here
548                 return good
549             except:
550                 log.err(None, 'Error processing data received from worker:')
551                 return False
552         
553         web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
554         
555         def get_rate():
556             if current_work.value['best_share_hash'] is not None:
557                 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
558                 att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
559                 fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
560                 return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
561             return json.dumps(None)
562         
563         def get_users():
564             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
565             weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
566             res = {}
567             for script in sorted(weights, key=lambda s: weights[s]):
568                 res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
569             return json.dumps(res)
570         
571         class WebInterface(resource.Resource):
572             def __init__(self, func, mime_type):
573                 self.func, self.mime_type = func, mime_type
574             
575             def render_GET(self, request):
576                 request.setHeader('Content-Type', self.mime_type)
577                 return self.func()
578         
579         web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
580         web_root.putChild('users', WebInterface(get_users, 'application/json'))
581         web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
582         if draw is not None:
583             web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
584         
585         reactor.listenTCP(args.worker_port, server.Site(web_root))
586         
        print '    ...success!'
        print
        
        # done!
        
        # do new getwork when a block is heard on the p2p interface
        
        def new_block(block_hash):
            # a newly announced block makes current work stale, so signal a refresh
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        # re-run set_real_work2 whenever the height tracker reports an update
        ht.updated.watch(set_real_work2)
602         
603         @defer.inlineCallbacks
604         def work1_thread():
605             while True:
606                 flag = work_updated.get_deferred()
607                 try:
608                     yield set_real_work1()
609                 except:
610                     log.err()
611                 yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
612         
613         @defer.inlineCallbacks
614         def work2_thread():
615             while True:
616                 try:
617                     set_real_work2()
618                 except:
619                     log.err()
620                 yield deferral.sleep(random.expovariate(1/20))
621         
        # start both maintenance loops (they run forever under the reactor)
        work1_thread()
        work2_thread()
        
        
        if hasattr(signal, 'SIGALRM'):
            def watchdog_handler(signum, frame):
                print 'Watchdog timer went off at:'
                traceback.print_stack()
            
            # re-arm a 30s alarm every second; if the reactor stalls for 30s
            # the alarm fires and the handler dumps a stack trace
            signal.signal(signal.SIGALRM, watchdog_handler)
            task.LoopingCall(signal.alarm, 30).start(1)
634         
635         def read_stale_frac(share):
636             if isinstance(share, p2pool.NewShare):
637                 return share.share_data['stale_frac']/254 if share.share_data['stale_frac'] != 255 else None
638             if len(share.nonce) < 4:
639                 return None
640             a, b = struct.unpack("<HH", share.nonce[-4:])
641             if a == 0 or a != b:
642                 return None
643             return a/65535
644
645         pool_str = None;
646         while True:
647             yield deferral.sleep(3)
648             try:
649                 if time.time() > current_work2.value['last_update'] + 60:
650                     print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
651                 if current_work.value['best_share_hash'] is not None:
652                     height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
653                     if height > 2:
654                         att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
655                         weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
656                         shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
657                         stale_shares = stale_doa_shares + stale_not_doa_shares
658                         fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
659                         str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
660                             math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
661                             height,
662                             len(tracker.verified.shares),
663                             len(tracker.shares),
664                             weights.get(my_script, 0)/total_weight*100,
665                             math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
666                             shares,
667                             stale_not_doa_shares,
668                             stale_doa_shares,
669                             len(p2p_node.peers),
670                         ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
671                         if (str != pool_str):
672                             print str;
673                             pool_str = str;
674                         if fracs:
675                             med = math.median(fracs)
676                             print 'Median stale proportion:', med
677                             if shares:
678                                 print '    Own:', stale_shares/shares
679                                 if med < .99:
680                                     print '    Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
681             
682             
683             except:
684                 log.err()
    except:
        log.err(None, 'Fatal error:')
    finally:
        # stop the reactor on any exit path so the process can terminate
        reactor.stop()
689
def run():
    """Parse command-line arguments, configure logging, and start the node.
    
    Network-dependent defaults (ports, payout address) are resolved here,
    after which main() is scheduled under the Twisted reactor.
    """
    class FixedArgumentParser(argparse.ArgumentParser):
        # Stock argparse @file support reads one argument per line; this
        # subclass instead splits each line on whitespace and expands @file
        # references recursively, so config files can look like command lines.
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            # recurse so files may reference further @files
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            # split one config-file line on whitespace, dropping empty fields
            return [arg for arg in arg_line.split() if arg.strip()]
    
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(x for x in p2pool.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='percentage amount to donate to author of p2pool. Default: 0.5',
        type=float, action='store', default=0.5, dest='donation_percentage')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: bitcoin: 9332 namecoin: 9331 ixcoin: 9330 i0coin: 9329 solidcoin: 9328 litecoin: 9327, +10000 for testnets)',
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:9332/fee . default: 0''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332 ixcoin: 8338 i0coin: 7332 litecoin: 9332)',
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 namecoin: 8334 ixcoin: 8337 i0coin: 7333 solidcoin: 7555 litecoin: 9333, +10000 for testnets)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # positional arguments: RPC username (optional) and password (required)
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
        help='bitcoind RPC interface username (default: empty)',
        type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
    
    # default log location: <network>[_testnet].log next to the launcher script
    if args.logfile is None:
        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), args.net_name + ('_testnet' if args.testnet else '') + '.log')
    
    class LogFile(object):
        # Append-only log file that can be reopened (e.g. after external
        # rotation) and that self-truncates to roughly its last 1 MB once it
        # grows past 100 MB.
        def __init__(self, filename):
            self.filename = filename
            self.inner_file = None  # current open file object, set by reopen()
            self.reopen()
        def reopen(self):
            if self.inner_file is not None:
                self.inner_file.close()
            open(self.filename, 'a').close()  # ensure the file exists
            f = open(self.filename, 'rb')
            f.seek(0, os.SEEK_END)
            length = f.tell()
            if length > 100*1000*1000:
                # keep only the final ~1 MB, starting at a line boundary (or EOF)
                f.seek(-1000*1000, os.SEEK_END)
                while True:
                    if f.read(1) in ('', '\n'):
                        break
                data = f.read()
                f.close()
                f = open(self.filename, 'wb')
                f.write(data)
            f.close()
            self.inner_file = open(self.filename, 'a')
        def write(self, data):
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
    class TeePipe(object):
        # File-like object that duplicates writes/flushes to several outputs.
        def __init__(self, outputs):
            self.outputs = outputs
        def write(self, data):
            for output in self.outputs:
                output.write(data)
        def flush(self):
            for output in self.outputs:
                output.flush()
    class TimestampingPipe(object):
        # File-like wrapper that prefixes every completed line with a
        # HH:MM:SS.microseconds timestamp; an unterminated tail is buffered
        # until its newline arrives.
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.buf = ''
            self.softspace = 0  # attribute consulted by the Python 2 print statement
        def write(self, data):
            buf = self.buf + data
            lines = buf.split('\n')
            for line in lines[:-1]:
                self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                self.inner_file.flush()
            self.buf = lines[-1]
        def flush(self):
            pass
    logfile = LogFile(args.logfile)
    # mirror all output (including Twisted log errors) to both stderr and the logfile
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    if hasattr(signal, "SIGUSR1"):
        # SIGUSR1 forces a log reopen, cooperating with external log rotation
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)  # periodic reopen also applies the size cap
    
    args.net = p2pool.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    # fill in network-specific defaults for any ports not given explicitly
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = args.net.BITCOIN_RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = args.net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None  # per --address help: one is requested from bitcoind instead
    
    # the two merged-mining options only make sense together (XOR check)
    if (args.merged_url is None) ^ (args.merged_userpass is None):
        parser.error('must specify --merged-url and --merged-userpass')
    
    reactor.callWhenRunning(main, args)
    reactor.run()