#!/usr/bin/python
# coding=utf-8

from __future__ import division

import argparse
import datetime
import itertools
import json
import os
import random
import signal
import sqlite3
import struct
import sys
import time
import traceback

from twisted.internet import defer, reactor, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import db, expiring_dict, jsonrpc, variable, deferral, math
from . import p2p, skiplists, networks
import p2pool, p2pool.data as p2pool_data

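# fetch a work template from bitcoind via the getmemorypool RPC and repack it
# into the field names and types used internally; 'bits' may arrive either as
# a hex string or as an integer (this varies across bitcoind versions), hence
# the isinstance() branch below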
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    work = yield bitcoind.rpc_getmemorypool()
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
        subsidy=work['coinbasevalue'],
        time=work['time'],
        target=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
    ))

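# ask bitcoind for an address in its 'p2pool' account and build a
# pay-to-pubkey script from it, falling back to a pay-to-pubkey-hash script
# if validateaddress doesn't return the pubkey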
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    validate_response = yield bitcoind.rpc_validateaddress(address)
    if 'pubkey' not in validate_response:
        print '    Pubkey request failed. Falling back to payout to address.'
        defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
    pubkey = validate_response['pubkey'].decode('hex')
    defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))

@defer.inlineCallbacks
def main(args, net):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.BITCOIN_RPC_CHECK)(bitcoind)
        if not good:
            print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        if args.pubkey_hash is None:
            print 'Getting payout address from bitcoind...'
            my_script = yield get_payout_script2(bitcoind, net)
        else:
            print 'Computing payout script from provided address...'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', bitcoin_data.script2_to_human(my_script, net)
        print
        
        print 'Loading cached block headers...'
        ht = bitcoin_p2p.HeightTracker(factory, net.NAME + '_headers.dat')
        print '    ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
        print
        
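        # share chain state: OkayTracker holds the tree of shares, ShareStore
        # persists shares and verified-hashes to <network>_shares.* files, and
        # shared_share_hashes remembers which shares peers already know about
        # so they aren't rebroadcast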
        tracker = p2pool_data.OkayTracker(net)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_shares.'), net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.added.watch(lambda share: ss.add_share(share))
        tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        requested = expiring_dict.ExpiringDict(300)
        
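        # poll bitcoind for fresh work; only a new previous-block hash pushes
        # a long-polling update to miners (via current_work) - time,
        # transactions and subsidy go into current_work2, which doesn't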
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                target=work['target'],
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
                aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            ))
            if changed:
                set_real_work2()
        
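        # re-evaluate the best share chain head and request any missing parent
        # shares from peers, backing off exponentially (the 10 * 1.5**count
        # window) on repeated requests for the same hash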
        def set_real_work2():
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # share was received while tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        print
        
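        # if merged mining is configured, poll the merged daemon's getauxblock
        # about once a second and publish the result as 'aux_work'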
        @defer.inlineCallbacks
        def set_merged_work():
            if not args.merged_url:
                return
            while True:
                merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
                x = dict(current_work.value)
                x['aux_work'] = dict(
                    hash=int(auxblock['hash'], 16),
                    target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
                    chain_id=auxblock['chainid'],
                )
                #print x['aux_work']
                current_work.set(x)
                yield deferral.sleep(1)
        set_merged_work()
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
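        # handlers for the p2pool P2P protocol: fold incoming shares into the
        # tracker, request shares we only know hashes of, and serve chains of
        # shares to peers that ask for them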
        def p2p_shares(shares, peer=None):
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            new_count = 0
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                    continue
                
                new_count += 1
                
                #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
            
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if new_count:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
        
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['target']:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker, net))
                else:
                    print 'No bitcoind connection! Erp!'
                print
                print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash,)
                print
        
        def p2p_share_hashes(share_hashes, peer):
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
            peer.sendShares(shares)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
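        # bootstrap peers: a few hardcoded addresses plus DNS-resolved seed
        # hosts, merged with any --p2pool-node addresses from the command line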
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, net.P2P_PORT
        
        nodes = set([
            ('72.14.191.28', net.P2P_PORT),
            ('62.204.197.159', net.P2P_PORT),
            ('142.58.248.28', net.P2P_PORT),
            ('94.23.34.145', net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        if net.NAME == 'litecoin':
            nodes.add(((yield reactor.resolve('liteco.in')), net.P2P_PORT))
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), net.NAME),
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # broadcast our new shares to peers whenever the best share chain changes
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        run_identifier = struct.pack('<I', random.randrange(2**32))
        
        share_counter = skiplists.CountsSkipList(tracker, run_identifier)
        removed_unstales = set()
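        # count our shares that made it onto the current best chain versus
        # stale ones; removed_unstales compensates for our shares that were on
        # the best chain but have since been pruned from the tracker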
        def get_share_counts(doa=False):
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
            shares_in_chain = my_shares & matching_in_chain
            stale_shares = my_shares - matching_in_chain
            if doa:
                stale_doa_shares = stale_shares & doa_shares
                stale_not_doa_shares = stale_shares - stale_doa_shares
                return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
            return len(shares_in_chain) + len(stale_shares), len(stale_shares)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_unstales.add(share.hash)
        
        def get_payout_script_from_username(request):
            user = worker_interface.get_username(request)
            if user is None:
                return None
            try:
                return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
            except: # XXX blah
                return None
        
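        # build a getwork job for a miner: choose the payout script (the
        # miner's own address, or ours when the worker-fee roll applies),
        # assemble the generate transaction and share_info, and return a
        # BlockAttempt carrying both the block target and the easier share
        # target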
        def compute(request):
            state = current_work.value
            payout_script = get_payout_script_from_username(request)
            if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                payout_script = my_script
            if state['best_share_hash'] is None and net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            if len(p2p_node.peers) == 0 and net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
            if time.time() > current_work2.value['last_update'] + 60:
                raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
            
            if state['aux_work'] is not None:
                aux_str = '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
            else:
                aux_str = ''
            
            # XXX assuming generate_tx is smallish here..
            def get_stale_frac():
                shares, stale_shares = get_share_counts()
                if shares == 0:
                    return ""
                frac = stale_shares/shares
                return 2*struct.pack('<H', int(65535*frac + .5))
            subsidy = current_work2.value['subsidy']
            
            timestamp = current_work2.value['time']
            previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
            share_info, generate_tx = p2pool_data.generate_transaction(
                tracker=tracker,
                share_data=dict(
                    previous_share_hash=state['best_share_hash'],
                    coinbase=aux_str,
                    nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                    new_script=payout_script,
                    subsidy=subsidy,
                    donation=math.perfect_round(65535*args.donation_percentage/100),
                    stale_frac=(lambda shares, stales:
                        255 if shares == 0 else math.perfect_round(254*stales/shares)
                    )(*get_share_counts()),
                ),
                block_target=state['target'],
                desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                net=net,
            )
            
            print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin_data.target_to_difficulty(share_info['target']), (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - subsidy//200)*1e-8, net.BITCOIN_SYMBOL, subsidy*1e-8, net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
            #print 'Target: %x' % (p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + list(current_work2.value['transactions'])
            merkle_root = bitcoin_data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = share_info, transactions
            
            target2 = share_info['target']
            times[merkle_root] = time.time()
            #print 'SENT', 2**256//p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2), state['best_share_hash']
        
        my_shares = set()
        doa_shares = set()
        times = {}
        
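        # handle a miner's submitted result: look the merkle root back up to
        # recover the transactions, then check the hash against the block
        # target (submit to bitcoind), the merged-mining target (submit via
        # getauxblock) and finally the share target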
        def got_response(header, request):
            try:
                user = worker_interface.get_username(request)
                # match up with transactions
                xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
                if xxx is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                share_info, transactions = xxx
                
                hash_ = bitcoin_data.block_header_type.hash256(header)
                
                pow_hash = net.BITCOIN_POW_FUNC(header)
                
                if pow_hash <= header['target'] or p2pool.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                    else:
                        print 'No bitcoind connection! Erp!'
                    if pow_hash <= header['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                
                if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
                    try:
                        aux_pow = dict(
                            merkle_tx=dict(
                                tx=transactions[0],
                                block_hash=hash_,
                                merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
                                index=0,
                            ),
                            merkle_branch=[],
                            index=0,
                            parent_block_header=header,
                        )
                        
                        a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
                        #print a, b
                        merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                        def _(res):
                            print "MERGED RESULT:", res
                        merged.rpc_getauxblock(a, b).addBoth(_)
                    except:
                        log.err(None, 'Error while processing merged mining POW:')
                
                target = share_info['target']
                if pow_hash > target:
                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
                    return False
                share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
                my_shares.add(share.hash)
                if share.previous_hash != current_work.value['best_share_hash']:
                    doa_shares.add(share.hash)
                print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(compute, got_response, current_work.changed)
        
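        # JSON status endpoints served alongside the worker interface; the
        # rate estimate divides pool attempts/second by one minus the median
        # stale fraction reported in recent shares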
        def get_rate():
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
                return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
            return json.dumps(None)
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net)] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        # do new getwork when a block is heard on the p2p interface
        
        def new_block(block_hash):
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def work1_thread():
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
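        # watchdog: re-arm a 30-second SIGALRM every second so the handler
        # only fires (and dumps a stack trace) if the reactor stalls for 30s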
        if hasattr(signal, 'SIGALRM'):
            def watchdog_handler(signum, frame):
                print 'Watchdog timer went off at:'
                traceback.print_stack()
            
            signal.signal(signal.SIGALRM, watchdog_handler)
            task.LoopingCall(signal.alarm, 30).start(1)
        
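        # periodic console status line; stale rates are printed as binomial
        # confidence intervals (the ± figures) rather than bare point
        # estimates, e.g. (illustrative numbers only):
        #   Pool stales: 12% Own: 10±5% Own efficiency: 102±6% (90% confidence)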
        pool_str = None
        while True:
            yield deferral.sleep(3)
            try:
                if time.time() > current_work2.value['last_update'] + 60:
                    print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                        shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
                        stale_shares = stale_doa_shares + stale_not_doa_shares
                        fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
                        status = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
                            math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
                            height,
                            len(tracker.verified.shares),
                            len(tracker.shares),
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
                            shares,
                            stale_not_doa_shares,
                            stale_doa_shares,
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                        if status != pool_str:
                            print status
                            pool_str = status
                        if fracs:
                            med = math.median(fracs)
                            print 'Pool stales: %i%%' % (int(100*med+.5),),
                            conf = 0.9
                            if shares:
                                print 'Own:', '%i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius(math.binomial_conf_interval(stale_shares, shares, conf))),
                                if med < .99:
                                    print 'Own efficiency:', '%i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius((1 - y)/(1 - med) for y in math.binomial_conf_interval(stale_shares, shares, conf)[::-1])),
                                print '(%i%% confidence)' % (int(100*conf+.5),),
                            print
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
    finally:
        reactor.stop()

def run():
    class FixedArgumentParser(argparse.ArgumentParser):
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            return [arg for arg in arg_line.split() if arg.strip()]
    
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='merged daemon user and password, separated by a colon. Example: ncuser:ncpass',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='percentage amount to donate to the author of p2pool. Default: 0.5',
        type=float, action='store', default=0.5, dest='donation_percentage')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to default p2pool P2P port), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward p2pool P2P port from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers who mine to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee for mining on your p2pool instance. Displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getmemorypool (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_RPC_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
        help='bitcoind RPC interface username (default: empty)',
        type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    if args.logfile is None:
        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '.log')
    
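    # LogFile appends to the log, trimming it back to roughly the last
    # megabyte whenever it exceeds 100 MB on reopen; reopen() also runs every
    # 5 seconds and on SIGUSR1 so external log rotation works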
    class LogFile(object):
        def __init__(self, filename):
            self.filename = filename
            self.inner_file = None
            self.reopen()
        def reopen(self):
            if self.inner_file is not None:
                self.inner_file.close()
            open(self.filename, 'a').close()
            f = open(self.filename, 'rb')
            f.seek(0, os.SEEK_END)
            length = f.tell()
            if length > 100*1000*1000:
                f.seek(-1000*1000, os.SEEK_END)
                while True:
                    if f.read(1) in ('', '\n'):
                        break
                data = f.read()
                f.close()
                f = open(self.filename, 'wb')
                f.write(data)
            f.close()
            self.inner_file = open(self.filename, 'a')
        def write(self, data):
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
    class TeePipe(object):
        def __init__(self, outputs):
            self.outputs = outputs
        def write(self, data):
            for output in self.outputs:
                output.write(data)
        def flush(self):
            for output in self.outputs:
                output.flush()
    class TimestampingPipe(object):
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.buf = ''
            self.softspace = 0
        def write(self, data):
            buf = self.buf + data
            lines = buf.split('\n')
            for line in lines[:-1]:
                self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                self.inner_file.flush()
            self.buf = lines[-1]
        def flush(self):
            pass
    logfile = LogFile(args.logfile)
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.BITCOIN_RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    if (args.merged_url is None) ^ (args.merged_userpass is None):
        parser.error('must specify both --merged-url and --merged-userpass, or neither')
    
    reactor.callWhenRunning(main, args, net)
    reactor.run()