39afeabddb1a8c3fc5a4a39a0d32f777891f2548
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
24 from bitcoin import worker_interface
25 from util import db, expiring_dict, jsonrpc, variable, deferral, math
26 from . import p2p, skiplists, networks
27 import p2pool, p2pool.data as p2pool_data
28
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch a block template from bitcoind via getmemorypool and normalize it.

    Fires (via returnValue) a dict with the parsed version, previous block
    hash, decoded transactions, subsidy, time, and target.
    """
    work = yield bitcoind.rpc_getmemorypool()
    # 'bits' may arrive as a hex string or as a plain number depending on the
    # bitcoind version; handle both representations.
    if isinstance(work['bits'], (str, unicode)):
        target = bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1])
    else:
        target = bitcoin_data.FloatingInteger(work['bits'])
    txs = [bitcoin_data.tx_type.unpack(raw.decode('hex')) for raw in work['transactions']]
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=txs,
        subsidy=work['coinbasevalue'],
        time=work['time'],
        target=target,
    ))
41
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Build the payout script for the local bitcoind's 'p2pool' account.

    Prefers a pay-to-pubkey script when validateaddress exposes the pubkey;
    otherwise logs the failure and falls back to a pay-to-pubkey-hash script
    derived from the address itself.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    try:
        pubkey = (yield bitcoind.rpc_validateaddress(address))['pubkey'].decode('hex')
    except Exception:
        # Narrowed from a bare except: a bare except would also swallow
        # KeyboardInterrupt/SystemExit. A missing/unparsable pubkey is the
        # expected failure here; log it and use the P2PKH fallback.
        log.err()
        defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
    else:
        defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))
53
54 @defer.inlineCallbacks
55 def main(args, net):
56     try:
57         print 'p2pool (version %s)' % (p2pool.__version__,)
58         print
59         try:
60             from . import draw
61         except ImportError:
62             draw = None
63             print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
64             print
65         
66         # connect to bitcoind over JSON-RPC and do initial getmemorypool
67         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
68         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
69         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
70         good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.BITCOIN_RPC_CHECK)(bitcoind)
71         if not good:
72             print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
73             return
74         temp_work = yield getwork(bitcoind)
75         print '    ...success!'
76         print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
77         print
78         
79         # connect to bitcoind over bitcoin-p2p
80         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
81         factory = bitcoin_p2p.ClientFactory(net)
82         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
83         yield factory.getProtocol() # waits until handshake is successful
84         print '    ...success!'
85         print
86         
87         if args.pubkey_hash is None:
88             print 'Getting payout address from bitcoind...'
89             my_script = yield get_payout_script2(bitcoind, net)
90         else:
91             print 'Computing payout script from provided address....'
92             my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
93         print '    ...success!'
94         print '    Payout script:', bitcoin_data.script2_to_human(my_script, net)
95         print
96         
97         print 'Loading cached block headers...'
98         ht = bitcoin_p2p.HeightTracker(factory, net.NAME + '_headers.dat')
99         print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
100         print
101         
102         tracker = p2pool_data.OkayTracker(net)
103         shared_share_hashes = set()
104         ss = p2pool_data.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_shares.'), net)
105         known_verified = set()
106         print "Loading shares..."
107         for i, (mode, contents) in enumerate(ss.get_shares()):
108             if mode == 'share':
109                 if contents.hash in tracker.shares:
110                     continue
111                 shared_share_hashes.add(contents.hash)
112                 contents.time_seen = 0
113                 tracker.add(contents)
114                 if len(tracker.shares) % 1000 == 0 and tracker.shares:
115                     print "    %i" % (len(tracker.shares),)
116             elif mode == 'verified_hash':
117                 known_verified.add(contents)
118             else:
119                 raise AssertionError()
120         print "    ...inserting %i verified shares..." % (len(known_verified),)
121         for h in known_verified:
122             if h not in tracker.shares:
123                 ss.forget_verified_share(h)
124                 continue
125             tracker.verified.add(tracker.shares[h])
126         print "    ...done loading %i shares!" % (len(tracker.shares),)
127         print
128         tracker.added.watch(lambda share: ss.add_share(share))
129         tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
130         tracker.removed.watch(lambda share: ss.forget_share(share.hash))
131         tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
132         tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
133         
134         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
135         
136         # information affecting work that should trigger a long-polling update
137         current_work = variable.Variable(None)
138         # information affecting work that should not trigger a long-polling update
139         current_work2 = variable.Variable(None)
140         
141         work_updated = variable.Event()
142         
143         requested = expiring_dict.ExpiringDict(300)
144         
145         @defer.inlineCallbacks
146         def set_real_work1():
147             work = yield getwork(bitcoind)
148             changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
149             current_work.set(dict(
150                 version=work['version'],
151                 previous_block=work['previous_block_hash'],
152                 target=work['target'],
153                 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
154                 aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
155             ))
156             current_work2.set(dict(
157                 time=work['time'],
158                 transactions=work['transactions'],
159                 subsidy=work['subsidy'],
160                 clock_offset=time.time() - work['time'],
161                 last_update=time.time(),
162             ))
163             if changed:
164                 set_real_work2()
165         
        def set_real_work2():
            # Re-evaluate which share is the best chain head, then request any
            # missing ancestor shares ("desired") from peers likely to have them.
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                # exponential backoff: skip hashes we asked for recently
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                # candidates: connected peers that announced a head descending
                # from this missing share
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    # first attempt: ask the peer that told us about the share
                    peer = peer2
                else:
                    # retries: usually (80%) a random candidate, else fall back
                    # to the originating peer (which may be None -> skip)
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        # stop a few shares short of each of our heads so the
                        # peer's response overlaps with what we already have
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100], # cap the number of stop hashes sent
                )
                requested[share_hash] = t, count + 1
200         
201         print 'Initializing work...'
202         yield set_real_work1()
203         set_real_work2()
204         print '    ...success!'
205         print
206         
207         @defer.inlineCallbacks
208         def set_merged_work():
209             if not args.merged_url:
210                 return
211             while True:
212                 merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
213                 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
214                 x = dict(current_work.value)
215                 x['aux_work'] = dict(
216                     hash=int(auxblock['hash'], 16),
217                     target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
218                     chain_id=auxblock['chainid'],
219                 )
220                 #print x['aux_work']
221                 current_work.set(x)
222                 yield deferral.sleep(1)
223         set_merged_work()
224         
225         start_time = time.time() - current_work2.value['clock_offset']
226         
227         # setup p2p logic and join p2pool network
228         
229         def p2p_shares(shares, peer=None):
230             if len(shares) > 5:
231                 print 'Processing %i shares...' % (len(shares),)
232             
233             new_count = 0
234             for share in shares:
235                 if share.hash in tracker.shares:
236                     #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
237                     continue
238                 
239                 new_count += 1
240                 
241                 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
242                 
243                 tracker.add(share)
244             
245             if shares and peer is not None:
246                 peer_heads.setdefault(shares[0].hash, set()).add(peer)
247             
248             if new_count:
249                 set_real_work2()
250             
251             if len(shares) > 5:
252                 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
253         
254         @tracker.verified.added.watch
255         def _(share):
256             if share.pow_hash <= share.header['target']:
257                 if factory.conn.value is not None:
258                     factory.conn.value.send_block(block=share.as_block(tracker, net))
259                 else:
260                     print 'No bitcoind connection! Erp!'
261                 print
262                 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash,)
263                 print
264         
265         def p2p_share_hashes(share_hashes, peer):
266             t = time.time()
267             get_hashes = []
268             for share_hash in share_hashes:
269                 if share_hash in tracker.shares:
270                     continue
271                 last_request_time, count = requested.get(share_hash, (None, 0))
272                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
273                     continue
274                 print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
275                 get_hashes.append(share_hash)
276                 requested[share_hash] = t, count + 1
277             
278             if share_hashes and peer is not None:
279                 peer_heads.setdefault(share_hashes[0], set()).add(peer)
280             if get_hashes:
281                 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
282         
283         def p2p_get_shares(share_hashes, parents, stops, peer):
284             parents = min(parents, 1000//len(share_hashes))
285             stops = set(stops)
286             shares = []
287             for share_hash in share_hashes:
288                 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
289                     if share.hash in stops:
290                         break
291                     shares.append(share)
292             print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
293             peer.sendShares(shares)
294         
295         print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
296         
297         def parse(x):
298             if ':' in x:
299                 ip, port = x.split(':')
300                 return ip, int(port)
301             else:
302                 return x, net.P2P_PORT
303         
304         nodes = set([
305             ('72.14.191.28', net.P2P_PORT),
306             ('62.204.197.159', net.P2P_PORT),
307             ('142.58.248.28', net.P2P_PORT),
308             ('94.23.34.145', net.P2P_PORT),
309         ])
310         for host in [
311             'p2pool.forre.st',
312             'dabuttonfactory.com',
313         ]:
314             try:
315                 nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
316             except:
317                 log.err(None, 'Error resolving bootstrap node IP:')
318
319         if net.NAME == 'litecoin':
320             nodes.add(((yield reactor.resolve('liteco.in')), net.P2P_PORT))
321         
322         p2p_node = p2p.Node(
323             current_work=current_work,
324             port=args.p2pool_port,
325             net=net,
326             addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), net.NAME),
327             preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
328         )
329         p2p_node.handle_shares = p2p_shares
330         p2p_node.handle_share_hashes = p2p_share_hashes
331         p2p_node.handle_get_shares = p2p_get_shares
332         
333         p2p_node.start()
334         
335         # send share when the chain changes to their chain
336         def work_changed(new_work):
337             #print 'Work changed:', new_work
338             shares = []
339             for share in tracker.get_chain_known(new_work['best_share_hash']):
340                 if share.hash in shared_share_hashes:
341                     break
342                 shared_share_hashes.add(share.hash)
343                 shares.append(share)
344             
345             for peer in p2p_node.peers.itervalues():
346                 peer.sendShares([share for share in shares if share.peer is not peer])
347         
348         current_work.changed.watch(work_changed)
349         
350         print '    ...success!'
351         print
352         
        @defer.inlineCallbacks
        def upnp_thread():
            # Best-effort UPnP: keep a mapping for our p2pool port on the LAN
            # gateway, retrying forever on a randomized interval (mean ~2 min).
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
                except defer.TimeoutError:
                    pass # discovery timed out; just try again next round
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
367         
368         if args.upnp:
369             upnp_thread()
370         
371         # start listening for workers with a JSON-RPC server
372         
373         print 'Listening for workers on port %i...' % (args.worker_port,)
374         
375         # setup worker logic
376         
377         merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
378         run_identifier = struct.pack('<I', random.randrange(2**32))
379         
380         share_counter = skiplists.CountsSkipList(tracker, run_identifier)
381         removed_unstales = set()
382         def get_share_counts(doa=False):
383             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
384             matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
385             shares_in_chain = my_shares & matching_in_chain
386             stale_shares = my_shares - matching_in_chain
387             if doa:
388                 stale_doa_shares = stale_shares & doa_shares
389                 stale_not_doa_shares = stale_shares - stale_doa_shares
390                 return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
391             return len(shares_in_chain) + len(stale_shares), len(stale_shares)
        @tracker.verified.removed.watch
        def _(share):
            # A verified share fell out of the tracker; if it was ours and is
            # still an ancestor of the best chain, keep counting it as unstale
            # so get_share_counts() doesn't regress.
            if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_unstales.add(share.hash)
396         
397         
398         def get_payout_script_from_username(request):
399             user = worker_interface.get_username(request)
400             if user is None:
401                 return None
402             try:
403                 return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
404             except: # XXX blah
405                 return None
406         
407         def compute(request):
408             state = current_work.value
409             payout_script = get_payout_script_from_username(request)
410             if payout_script is None or random.uniform(0, 100) < args.worker_fee:
411                 payout_script = my_script
412             if state['best_share_hash'] is None and net.PERSIST:
413                 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
414             if len(p2p_node.peers) == 0 and net.PERSIST:
415                 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
416             if time.time() > current_work2.value['last_update'] + 60:
417                 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
418             
419             if state['aux_work'] is not None:
420                 aux_str = '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
421             else:
422                 aux_str = ''
423             
424             # XXX assuming generate_tx is smallish here..
425             def get_stale_frac():
426                 shares, stale_shares = get_share_counts()
427                 if shares == 0:
428                     return ""
429                 frac = stale_shares/shares
430                 return 2*struct.pack('<H', int(65535*frac + .5))
431             subsidy = current_work2.value['subsidy']
432             
433             
434             timestamp = current_work2.value['time']
435             previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
436             new_share_info, generate_tx = p2pool_data.new_generate_transaction(
437                 tracker=tracker,
438                 new_share_data=dict(
439                     previous_share_hash=state['best_share_hash'],
440                     coinbase=aux_str,
441                     nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
442                     new_script=payout_script,
443                     subsidy=subsidy,
444                     donation=math.perfect_round(65535*args.donation_percentage/100),
445                     stale_frac=(lambda shares, stales:
446                         255 if shares == 0 else math.perfect_round(254*stales/shares)
447                     )(*get_share_counts()),
448                 ),
449                 block_target=state['target'],
450                 desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
451                 net=net,
452             )
453             
454             print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin_data.target_to_difficulty(new_share_info['target']), (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) -subsidy//200)*1e-8, net.BITCOIN_SYMBOL, subsidy*1e-8, net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
455             #print 'Target: %x' % (p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
456             #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
457             transactions = [generate_tx] + list(current_work2.value['transactions'])
458             merkle_root = bitcoin_data.merkle_hash(transactions)
459             merkle_root_to_transactions[merkle_root] = new_share_info, transactions
460             
461             target2 = new_share_info['target']
462             times[merkle_root] = time.time()
463             #print 'SENT', 2**256//p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
464             return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2), state['best_share_hash']
465         
466         my_shares = set()
467         doa_shares = set()
468         times = {}
469         
        def got_response(header, request):
            # Handle a solved-work submission from a worker. Returns True when
            # the resulting share builds on the current best chain head.
            try:
                user = worker_interface.get_username(request)
                # match up with transactions
                xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
                if xxx is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                share_info, transactions = xxx
                new_share_info = share_info
                
                hash_ = bitcoin_data.block_header_type.hash256(header)
                
                pow_hash = net.BITCOIN_POW_FUNC(header)
                
                # full bitcoin block solved (or DEBUG)? submit it to bitcoind
                if pow_hash <= header['target'] or p2pool.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                    else:
                        print 'No bitcoind connection! Erp!'
                    if pow_hash <= header['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                
                # merged mining: does this POW also satisfy the aux chain target?
                if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
                    try:
                        aux_pow = dict(
                            merkle_tx=dict(
                                tx=transactions[0],
                                block_hash=hash_,
                                merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
                                index=0,
                            ),
                            merkle_branch=[],
                            index=0,
                            parent_block_header=header,
                        )
                        
                        # presumably bytes [-40:-8] of the coinbase script hold
                        # the aux block hash (see aux_str in compute) -- verify
                        a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
                        merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                        def _(res):
                            print "MERGED RESULT:", res
                        merged.rpc_getauxblock(a, b).addBoth(_)
                    except:
                        log.err(None, 'Error while processing merged mining POW:')
                
                target = new_share_info['target']
                if pow_hash > target:
                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
                    return False
                share = p2pool_data.NewShare(net, header, new_share_info, other_txs=transactions[1:])
                my_shares.add(share.hash)
                # dead-on-arrival: built on a share that is no longer the tip
                if share.previous_hash != current_work.value['best_share_hash']:
                    doa_shares.add(share.hash)
                print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
535         
536         web_root = worker_interface.WorkerInterface(compute, got_response, current_work.changed)
537         
538         def get_rate():
539             if current_work.value['best_share_hash'] is not None:
540                 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
541                 att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], net, min(height - 1, 720))
542                 fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
543                 return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
544             return json.dumps(None)
545         
546         def get_users():
547             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
548             weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
549             res = {}
550             for script in sorted(weights, key=lambda s: weights[s]):
551                 res[bitcoin_data.script2_to_human(script, net)] = weights[script]/total_weight
552             return json.dumps(res)
553         
554         class WebInterface(resource.Resource):
555             def __init__(self, func, mime_type):
556                 self.func, self.mime_type = func, mime_type
557             
558             def render_GET(self, request):
559                 request.setHeader('Content-Type', self.mime_type)
560                 return self.func()
561         
562         web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
563         web_root.putChild('users', WebInterface(get_users, 'application/json'))
564         web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
565         if draw is not None:
566             web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
567         
568         reactor.listenTCP(args.worker_port, server.Site(web_root))
569         
570         print '    ...success!'
571         print
572         
573         # done!
574         
575         # do new getwork when a block is heard on the p2p interface
576         
        def new_block(block_hash):
            # A new bitcoin block was heard on p2p: prod the work-refresh loop.
            work_updated.happened()
579         factory.new_block.watch(new_block)
580         
581         print 'Started successfully!'
582         print
583         
584         ht.updated.watch(set_real_work2)
585         
        @defer.inlineCallbacks
        def work1_thread():
            # Refresh work from bitcoind whenever work_updated fires, or at
            # latest after a random 1-10s interval.
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                # wake on either the event or the timeout, whichever is first
                yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
595         
        @defer.inlineCallbacks
        def work2_thread():
            # Periodically rethink the best share chain (exponentially
            # distributed interval, mean 20s).
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
604         
605         work1_thread()
606         work2_thread()
607         
608         
609         if hasattr(signal, 'SIGALRM'):
            def watchdog_handler(signum, frame):
                # SIGALRM fires when the reactor has stalled (the LoopingCall
                # below re-arms a 30s alarm every second); dump where we are.
                print 'Watchdog timer went off at:'
                traceback.print_stack()
613             
614             signal.signal(signal.SIGALRM, watchdog_handler)
615             task.LoopingCall(signal.alarm, 30).start(1)
616         
617         
618         pool_str = None;
619         while True:
620             yield deferral.sleep(3)
621             try:
622                 if time.time() > current_work2.value['last_update'] + 60:
623                     print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
624                 if current_work.value['best_share_hash'] is not None:
625                     height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
626                     if height > 2:
627                         att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], net, min(height - 1, 720))
628                         weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
629                         shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
630                         stale_shares = stale_doa_shares + stale_not_doa_shares
631                         fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
632                         str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
633                             math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
634                             height,
635                             len(tracker.verified.shares),
636                             len(tracker.shares),
637                             weights.get(my_script, 0)/total_weight*100,
638                             math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
639                             shares,
640                             stale_not_doa_shares,
641                             stale_doa_shares,
642                             len(p2p_node.peers),
643                         ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
644                         if (str != pool_str):
645                             print str;
646                             pool_str = str;
647                         if fracs:
648                             med = math.median(fracs)
649                             print 'Median stale proportion:', med
650                             if shares:
651                                 print '    Own:', stale_shares/shares
652                                 if med < .99:
653                                     print '    Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
654             
655             
656             except:
657                 log.err()
658     except:
659         log.err(None, 'Fatal error:')
660     finally:
661         reactor.stop()
662
def run():
    class FixedArgumentParser(argparse.ArgumentParser):
        """ArgumentParser variant whose @file argument files may hold several
        whitespace-separated arguments per line (the stock
        convert_arg_line_to_args treats each line as a single argument)."""
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            # NOTE: rebinding arg_strings here is safe — the
                            # outer for loop already holds its own iterator.
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            # Recurse so @file references inside the file are
                            # expanded too.
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            # Split each line on whitespace, dropping empty fragments.
            return [arg for arg in arg_line.split() if arg.strip()]
696     
    # Command-line interface. Arguments prefixed with '@' name files whose
    # contents are expanded in place (see FixedArgumentParser above).
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='percentage amount to donate to author of p2pool. Default: 0.5',
        type=float, action='store', default=0.5, dest='donation_percentage')
723     
724     p2pool_group = parser.add_argument_group('p2pool interface')
725     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
726         help='use TCP port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
727         type=int, action='store', default=None, dest='p2pool_port')
728     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
729         help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to default p2pool P2P port), in addition to builtin addresses',
730         type=str, action='append', default=[], dest='p2pool_nodes')
731     parser.add_argument('--disable-upnp',
732         help='''don't attempt to forward p2pool P2P port from the WAN to this computer using UPnP''',
733         action='store_false', default=True, dest='upnp')
734     
    # Options controlling the miner-facing (getwork RPC) interface.
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default:%s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee . default: 0''',
        type=float, action='store', default=0, dest='worker_fee')
742     
    # Options describing how to reach the local bitcoind (RPC + raw p2p).
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getmemorypool (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_RPC_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # No leading flag strings => these two are positional arguments:
    # username (optional, defaults to empty) and password (required).
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
        help='bitcoind RPC interface username (default: empty)',
        type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
760     
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    # Select the network definition, switching to the '<name>_testnet'
    # variant when --testnet was given.
    net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    if args.logfile is None:
        # Default logfile: <network_name>.log next to the startup script.
        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '.log')
770     
    class LogFile(object):
        """Append-only log file that can be reopened (for rotation) and that
        trims itself to roughly the last 1 MB once it grows past 100 MB."""
        def __init__(self, filename):
            self.filename = filename
            self.inner_file = None
            self.reopen()
        def reopen(self):
            # Close any previous handle, shrink the file if oversized, then
            # reopen in append mode.
            if self.inner_file is not None:
                self.inner_file.close()
            open(self.filename, 'a').close()  # make sure the file exists
            f = open(self.filename, 'rb')
            f.seek(0, os.SEEK_END)
            length = f.tell()
            if length > 100*1000*1000:
                # Keep only the final ~1 MB, skipping forward to the start of
                # the next complete line ('' means EOF was reached first).
                f.seek(-1000*1000, os.SEEK_END)
                while True:
                    if f.read(1) in ('', '\n'):
                        break
                data = f.read()
                f.close()
                f = open(self.filename, 'wb')
                f.write(data)
            f.close()
            self.inner_file = open(self.filename, 'a')
        def write(self, data):
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
798     class TeePipe(object):
799         def __init__(self, outputs):
800             self.outputs = outputs
801         def write(self, data):
802             for output in self.outputs:
803                 output.write(data)
804         def flush(self):
805             for output in self.outputs:
806                 output.flush()
807     class TimestampingPipe(object):
808         def __init__(self, inner_file):
809             self.inner_file = inner_file
810             self.buf = ''
811             self.softspace = 0
812         def write(self, data):
813             buf = self.buf + data
814             lines = buf.split('\n')
815             for line in lines[:-1]:
816                 self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
817                 self.inner_file.flush()
818             self.buf = lines[-1]
819         def flush(self):
820             pass
    logfile = LogFile(args.logfile)
    # Mirror all stdout/stderr (and Twisted log output) to both the original
    # stderr and the logfile, timestamping each line.
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    # SIGUSR1 reopens the log, allowing external logrotate-style rotation.
    # (Not available on Windows, hence the hasattr guard.)
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    # Also reopen (and possibly trim — see LogFile.reopen) every 5 seconds.
    task.LoopingCall(logfile.reopen).start(5)
    
    # Fill in network-specific defaults for any port not given explicitly.
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.BITCOIN_RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        # No payout address given; one is requested from bitcoind later.
        args.pubkey_hash = None
    
    # XOR check: --merged-url and --merged-userpass must be given together
    # (both present or both absent).
    if (args.merged_url is None) ^ (args.merged_userpass is None):
        parser.error('must specify --merged-url and --merged-userpass')
    
    reactor.callWhenRunning(main, args, net)
    reactor.run()