#!/usr/bin/python
# coding=utf-8

from __future__ import division

import argparse
import datetime
import itertools
import json
import os
import random
import signal
import struct
import sys
import time
import traceback

from twisted.internet import defer, reactor, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import expiring_dict, jsonrpc, variable, deferral, math
from . import p2p, skiplists, networks
import p2pool, p2pool.data as p2pool_data

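# getwork: fetch a block template from bitcoind via the old getmemorypool
# RPC (the predecessor of getblocktemplate) and normalize it into the dict
# the rest of the code expects. 'bits' may arrive either as a hex string or
# as an integer depending on the bitcoind version, hence the two unpack
# paths below.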
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    work = yield bitcoind.rpc_getmemorypool()
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
        subsidy=work['coinbasevalue'],
        time=work['time'],
        target=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
    ))

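# get_payout_script2: derive this node's payout script. Asks bitcoind for
# an address in the 'p2pool' account; if validateaddress exposes the raw
# pubkey, pay to that pubkey directly, otherwise fall back to a standard
# pay-to-pubkey-hash script for the address.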
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    validate_response = yield bitcoind.rpc_validateaddress(address)
    if 'pubkey' not in validate_response:
        print '    Pubkey request failed. Falling back to payout to address.'
        defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
    pubkey = validate_response['pubkey'].decode('hex')
    defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))

@defer.inlineCallbacks
def main(args, net):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do an initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.BITCOIN_RPC_CHECK)(bitcoind)
        if not good:
            print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        if args.pubkey_hash is None:
            print 'Getting payout address from bitcoind...'
            my_script = yield get_payout_script2(bitcoind, net)
        else:
            print 'Computing payout script from provided address...'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', bitcoin_data.script2_to_human(my_script, net)
        print
        
        ht = bitcoin_p2p.HeightTracker(bitcoind, factory)
        
        tracker = p2pool_data.OkayTracker(net)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_shares.'), net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_current_work2 = variable.Variable(None)
        pre_merged_work = variable.Variable(None)
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            pre_current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # set the second variable first, since watchers hook on the first
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                target=work['target'],
            ))
        
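        # set_real_work2: merge bitcoind's latest template with the share
        # chain's best tip, publish it to current_work/current_work2, and
        # request any desired-but-missing parent shares from peers.
        # Re-requests of the same hash back off exponentially
        # (10 * 1.5**count seconds between attempts).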
        def set_real_work2():
            best, desired = tracker.think(ht, pre_current_work.value['previous_block'], time.time() - pre_current_work2.value['clock_offset'])
            
            current_work2.set(pre_current_work2.value)
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['aux_work'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # share was received while tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        pre_current_work.changed.watch(lambda _: set_real_work2())
        
        print 'Initializing work...'
        yield set_real_work1()
        print '    ...success!'
        print
        
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def set_merged_work():
            if not args.merged_url:
                return
            merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
                pre_merged_work.set(dict(
                    hash=int(auxblock['hash'], 16),
                    target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
                    chain_id=auxblock['chainid'],
                ))
                yield deferral.sleep(1)
        set_merged_work()
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
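        # p2p_shares: handle a batch of shares received over the p2pool
        # network: add unknown ones to the tracker, remember which peer
        # announced the batch head (for routing later getshares requests),
        # and recompute work if anything new arrived.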
        def p2p_shares(shares, peer=None):
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            new_count = 0
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                    continue
                
                new_count += 1
                
                #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
            
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if new_count:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
        
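        # If a newly verified share's proof-of-work also meets the bitcoin
        # block target, reassemble the full block and submit it to bitcoind
        # over the p2p connection.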
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['target']:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker, net))
                else:
                    print 'No bitcoind connection! Erp!'
                print
                print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash,)
                print
        
        def p2p_share_hashes(share_hashes, peer):
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
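        # p2p_get_shares: serve a peer's getshares request, walking each
        # requested chain until a stop hash. The parents count is capped so
        # one request never returns more than about 1000 shares in total.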
        def p2p_get_shares(share_hashes, parents, stops, peer):
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
            peer.sendShares(shares)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, net.P2P_PORT
        
        nodes = set([
            ('72.14.191.28', net.P2P_PORT),
            ('62.204.197.159', net.P2P_PORT),
            ('142.58.248.28', net.P2P_PORT),
            ('94.23.34.145', net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        if net.NAME == 'litecoin':
            nodes.add(((yield reactor.resolve('liteco.in')), net.P2P_PORT))
        
        addrs = {}
        try:
            addrs = dict(eval(x) for x in open(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_addrs.txt')))
        except:
            print "Error reading addrs file"
        
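        # Peer addresses persist across restarts in <net>_addrs.txt, one
        # repr()'d (address, info) tuple per line, eval()'d back in above
        # and rewritten once a minute below.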
        def save_addrs():
            open(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_addrs.txt'), 'w').writelines(repr(x) + '\n' for x in addrs.iteritems())
        task.LoopingCall(save_addrs).start(60)
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send peers any new shares when the best chain changes
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        run_identifier = struct.pack('<I', random.randrange(2**32))
        
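        # run_identifier is a random per-session tag embedded in the nonce
        # of every share this node generates; share_counter scans the chain
        # for it so get_share_counts can tell our shares from everyone
        # else's.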
        share_counter = skiplists.CountsSkipList(tracker, run_identifier)
        removed_unstales = set()
        def get_share_counts(doa=False):
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
            shares_in_chain = my_shares & matching_in_chain
            stale_shares = my_shares - matching_in_chain
            if doa:
                stale_doa_shares = stale_shares & doa_shares
                stale_not_doa_shares = stale_shares - stale_doa_shares
                return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
            return len(shares_in_chain) + len(stale_shares), len(stale_shares)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_unstales.add(share.hash)
        
        def get_payout_script_from_username(request):
            user = worker_interface.get_username(request)
            if user is None:
                return None
            try:
                return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
            except: # XXX blah
                return None
        
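        # compute: build a block attempt for a worker. The payout script is
        # taken from the miner's username when it parses as a valid address
        # (randomly overridden at the --fee percentage), falling back to
        # this node's own script. The generated transactions are cached by
        # merkle root so got_response can link returned work back up.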
        def compute(request):
            state = current_work.value
            
            payout_script = get_payout_script_from_username(request)
            if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                payout_script = my_script
            
            if len(p2p_node.peers) == 0 and net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
            if state['best_share_hash'] is None and net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            if time.time() > current_work2.value['last_update'] + 60:
                raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
            
            previous_share = None if state['best_share_hash'] is None else tracker.shares[state['best_share_hash']]
            subsidy = current_work2.value['subsidy']
            share_info, generate_tx = p2pool_data.generate_transaction(
                tracker=tracker,
                share_data=dict(
                    previous_share_hash=state['best_share_hash'],
                    coinbase='' if state['aux_work'] is None else '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0),
                    nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                    new_script=payout_script,
                    subsidy=subsidy,
                    donation=math.perfect_round(65535*args.donation_percentage/100),
                    stale_frac=(lambda shares, stales:
                        255 if shares == 0 else math.perfect_round(254*stales/shares)
                    )(*get_share_counts()),
                ),
                block_target=state['target'],
                desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                net=net,
            )
            
            print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
                bitcoin_data.target_to_difficulty(share_info['target']),
                (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - subsidy//200)*1e-8, net.BITCOIN_SYMBOL,
                subsidy*1e-8, net.BITCOIN_SYMBOL,
                len(current_work2.value['transactions']),
            )
            
            transactions = [generate_tx] + list(current_work2.value['transactions'])
            merkle_root = bitcoin_data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time()
            
            return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, current_work2.value['time'], state['target'], share_info['target']), state['best_share_hash']
        
        my_shares = set()
        doa_shares = set()
        
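        # got_response: check a solved header from a worker against three
        # targets, from hardest to easiest: the bitcoin block target
        # (submit a block), the merged-mining aux target if any (submit an
        # aux proof-of-work), and the share target (record and broadcast
        # the share).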
        def got_response(header, request):
            try:
                user = worker_interface.get_username(request)
                # match up with transactions
                found = merkle_root_to_transactions.get(header['merkle_root'], None)
                if found is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                share_info, transactions, getwork_time = found
                
                hash_ = bitcoin_data.block_header_type.hash256(header)
                
                pow_hash = net.BITCOIN_POW_FUNC(header)
                
                if pow_hash <= header['target'] or p2pool.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                    else:
                        print 'No bitcoind connection! Erp!'
                    if pow_hash <= header['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                
                if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
                    try:
                        aux_pow = dict(
                            merkle_tx=dict(
                                tx=transactions[0],
                                block_hash=hash_,
                                merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
                                index=0,
                            ),
                            merkle_branch=[],
                            index=0,
                            parent_block_header=header,
                        )
                        
                        a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
                        #print a, b
                        merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                        def _(res):
                            print "MERGED RESULT:", res
                        merged.rpc_getauxblock(a, b).addBoth(_)
                    except:
                        log.err(None, 'Error while processing merged mining POW:')
                
                target = share_info['target']
                if pow_hash > target:
                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
                    return False
                share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
                my_shares.add(share.hash)
                if share.previous_hash != current_work.value['best_share_hash']:
                    doa_shares.add(share.hash)
                print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - getwork_time) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(compute, got_response, current_work.changed)
        
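        # get_rate estimates total pool hashrate: attempts per second over
        # recent shares, scaled up by the median stale fraction reported in
        # shares (att_s / (1 - median)), since stale work is real work that
        # never made it into the chain.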
        def get_rate():
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
                return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
            return json.dumps(None)
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net)] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        # do new getwork when a block is heard on the p2p interface
        
        def new_block(block_hash):
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        @defer.inlineCallbacks
        def work1_thread():
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
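        # Watchdog: rearm a 30-second alarm every second; if the reactor
        # stalls for more than 30 seconds, SIGALRM fires and the handler
        # prints a stack trace showing where it got stuck.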
        if hasattr(signal, 'SIGALRM'):
            def watchdog_handler(signum, frame):
                print 'Watchdog timer went off at:'
                traceback.print_stack()
            
            signal.signal(signal.SIGALRM, watchdog_handler)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        last_str = None
        last_time = 0
        while True:
            yield deferral.sleep(3)
            try:
                if time.time() > current_work2.value['last_update'] + 60:
                    print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                        shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
                        stale_shares = stale_doa_shares + stale_not_doa_shares
                        fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
                        this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
                            math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
                            height,
                            len(tracker.verified.shares),
                            len(tracker.shares),
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
                            shares,
                            stale_not_doa_shares,
                            stale_doa_shares,
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                        if fracs:
                            med = math.median(fracs)
                            this_str += '\nPool stales: %i%%' % (int(100*med+.5),)
                            conf = 0.9
                            if shares:
                                this_str += ' Own: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius(math.binomial_conf_interval(stale_shares, shares, conf)))
                                if med < .99:
                                    this_str += ' Own efficiency: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius((1 - y)/(1 - med) for y in math.binomial_conf_interval(stale_shares, shares, conf)[::-1]))
                                this_str += ' (%i%% confidence)' % (int(100*conf+.5),)
                        if this_str != last_str or time.time() > last_time + 15:
                            print this_str
                            last_str = this_str
                            last_time = time.time()
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
    finally:
        reactor.stop()

def run():
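    # argparse reads @file arguments one per line by default; this subclass
    # splits each line on whitespace and expands nested @file references,
    # so option files can hold several arguments per line and include other
    # files.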
    class FixedArgumentParser(argparse.ArgumentParser):
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            return [arg for arg in arg_line.split() if arg.strip()]
    
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='merged daemon RPC username and password, separated by a colon. Example: ncuser:ncpass',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='percentage amount to donate to the author of p2pool (default: 0.5)',
        type=float, action='store', default=0.5, dest='donation_percentage')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to default p2pool P2P port), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward the p2pool P2P port from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers who mine to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee for mining on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getmemorypool (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_RPC_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
        help='bitcoind RPC interface username (default: empty)',
        type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    if args.logfile is None:
        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '.log')
    
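    # Logging setup: stdout and stderr are teed to both the console and a
    # log file, timestamping each line on the way through. LogFile.reopen()
    # also truncates the file to roughly its final megabyte (cut at a line
    # boundary) once it passes 100 MB.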
    class LogFile(object):
        def __init__(self, filename):
            self.filename = filename
            self.inner_file = None
            self.reopen()
        def reopen(self):
            if self.inner_file is not None:
                self.inner_file.close()
            open(self.filename, 'a').close()
            f = open(self.filename, 'rb')
            f.seek(0, os.SEEK_END)
            length = f.tell()
            if length > 100*1000*1000:
                f.seek(-1000*1000, os.SEEK_END)
                while True:
                    if f.read(1) in ('', '\n'):
                        break
                data = f.read()
                f.close()
                f = open(self.filename, 'wb')
                f.write(data)
            f.close()
            self.inner_file = open(self.filename, 'a')
        def write(self, data):
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
    class TeePipe(object):
        def __init__(self, outputs):
            self.outputs = outputs
        def write(self, data):
            for output in self.outputs:
                output.write(data)
        def flush(self):
            for output in self.outputs:
                output.flush()
    class TimestampingPipe(object):
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.buf = ''
            self.softspace = 0
        def write(self, data):
            buf = self.buf + data
            lines = buf.split('\n')
            for line in lines[:-1]:
                self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                self.inner_file.flush()
            self.buf = lines[-1]
        def flush(self):
            pass
    logfile = LogFile(args.logfile)
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.BITCOIN_RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    if (args.merged_url is None) ^ (args.merged_userpass is None):
        parser.error('must specify both --merged-url and --merged-userpass, or neither')
    
    reactor.callWhenRunning(main, args, net)
    reactor.run()