#!/usr/bin/python

from __future__ import division

import argparse
import datetime
import itertools
import os
import random
import sqlite3
import struct
import sys
import time
import json
import signal
import traceback

from twisted.internet import defer, reactor, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import db, expiring_dict, jsonrpc, variable, deferral, math
from . import p2p, skiplists, networks
import p2pool, p2pool.data as p2pool_data

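# fetches work from bitcoind via the getmemorypool RPC and normalizes it into
# the dict format used throughout this module; the deferral.retry decorator
# re-attempts the call if bitcoind errors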
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    work = yield bitcoind.rpc_getmemorypool()
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
        subsidy=work['coinbasevalue'],
        time=work['time'],
        target=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
    ))

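# builds the script that this node's earnings are paid to: prefer a
# pay-to-pubkey script for bitcoind's 'p2pool' account address, falling back
# to a pay-to-pubkey-hash script if the pubkey can't be fetched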
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    try:
        pubkey = (yield bitcoind.rpc_validateaddress(address))['pubkey'].decode('hex')
    except:
        log.err()
        defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
    else:
        defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))

@defer.inlineCallbacks
def main(args, net):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.BITCOIN_RPC_CHECK)(bitcoind)
        if not good:
            print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        if args.pubkey_hash is None:
            print 'Getting payout address from bitcoind...'
            my_script = yield get_payout_script2(bitcoind, net)
        else:
            print 'Computing payout script from provided address...'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', bitcoin_data.script2_to_human(my_script, net)
        print
        
        print 'Loading cached block headers...'
        ht = bitcoin_p2p.HeightTracker(factory, net.NAME + '_headers.dat')
        print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
        print
        
        tracker = p2pool_data.OkayTracker(net)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_shares.'), net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.added.watch(lambda share: ss.add_share(share))
        tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        requested = expiring_dict.ExpiringDict(300)
        
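        # polls bitcoind for fresh work; if the previous block changed,
        # calls set_real_work2() to recompute the best share to build on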
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                target=work['target'],
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
                aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            ))
            if changed:
                set_real_work2()
        
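        # recomputes the best share to mine on from the tracker's view of the
        # chain, and requests missing parent shares from peers likely to have them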
        def set_real_work2():
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            new_work = dict(current_work.value)
            new_work['best_share_hash'] = best
            current_work.set(new_work)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
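                # exponential backoff: skip if this hash was re-requested too recently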
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def set_merged_work():
            if not args.merged_url:
                return
            while True:
                merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
                x = dict(current_work.value)
                x['aux_work'] = dict(
                    hash=int(auxblock['hash'], 16),
                    target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
                    chain_id=auxblock['chainid'],
                )
                #print x['aux_work']
                current_work.set(x)
                yield deferral.sleep(1)
        set_merged_work()
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
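        # handles a batch of shares received from the p2pool network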
        def p2p_shares(shares, peer=None):
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            new_count = 0
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                    continue
                
                new_count += 1
                
                #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
            
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if new_count:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
        
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['target']:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker, net))
                else:
                    print 'No bitcoind connection! Erp!'
                print
                print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash,)
                print
        
        def p2p_share_hashes(share_hashes, peer):
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            if not share_hashes: # guard against an empty request, which would divide by zero below
                return
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
            peer.sendShares(shares)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, net.P2P_PORT
        
        nodes = set([
            ('72.14.191.28', net.P2P_PORT),
            ('62.204.197.159', net.P2P_PORT),
            ('142.58.248.28', net.P2P_PORT),
            ('94.23.34.145', net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        if net.NAME == 'litecoin':
            nodes.add(((yield reactor.resolve('liteco.in')), net.P2P_PORT))
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), net.NAME),
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send shares when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        run_identifier = struct.pack('<I', random.randrange(2**32))
        
        share_counter = skiplists.CountsSkipList(tracker, run_identifier)
        removed_unstales = set()
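        # returns (total shares, stale shares) for this node in the current
        # chain; with doa=True the stale count is split into dead-on-arrival
        # and other (orphaned) shares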
        def get_share_counts(doa=False):
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
            shares_in_chain = my_shares & matching_in_chain
            stale_shares = my_shares - matching_in_chain
            if doa:
                stale_doa_shares = stale_shares & doa_shares
                stale_not_doa_shares = stale_shares - stale_doa_shares
                return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
            return len(shares_in_chain) + len(stale_shares), len(stale_shares)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_unstales.add(share.hash)
        
        def get_payout_script_from_username(request):
            user = worker_interface.get_username(request)
            if user is None:
                return None
            try:
                return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
            except: # XXX blah
                return None
        
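        # builds a getwork block attempt for a miner request, embedding the
        # p2pool share in the coinbase transaction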
        def compute(request):
            state = current_work.value
            payout_script = get_payout_script_from_username(request)
            if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                payout_script = my_script
            if state['best_share_hash'] is None and net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            if len(p2p_node.peers) == 0 and net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
            if time.time() > current_work2.value['last_update'] + 60:
                raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
            
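            # merged mining: embed the magic bytes '\xfa\xbemm', the aux block
            # hash, and a merkle size/nonce pair in the coinbase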
            if state['aux_work'] is not None:
                aux_str = '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
            else:
                aux_str = ''
            
            # XXX assuming generate_tx is smallish here..
            def get_stale_frac():
                shares, stale_shares = get_share_counts()
                if shares == 0:
                    return ""
                frac = stale_shares/shares
                return 2*struct.pack('<H', int(65535*frac + .5))
            subsidy = current_work2.value['subsidy']
            
            timestamp = current_work2.value['time']
            previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
            share_info, generate_tx = p2pool_data.generate_transaction(
                tracker=tracker,
                share_data=dict(
                    previous_share_hash=state['best_share_hash'],
                    coinbase=aux_str,
                    nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                    new_script=payout_script,
                    subsidy=subsidy,
                    donation=math.perfect_round(65535*args.donation_percentage/100),
                    stale_frac=(lambda shares, stales:
                        255 if shares == 0 else math.perfect_round(254*stales/shares)
                    )(*get_share_counts()),
                ),
                block_target=state['target'],
                desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                net=net,
            )
            
            print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin_data.target_to_difficulty(share_info['target']), (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - subsidy//200)*1e-8, net.BITCOIN_SYMBOL, subsidy*1e-8, net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
            #print 'Target: %x' % (p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + list(current_work2.value['transactions'])
            merkle_root = bitcoin_data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = share_info, transactions
            
            target2 = share_info['target']
            times[merkle_root] = time.time()
            #print 'SENT', 2**256//p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2), state['best_share_hash']
        
        my_shares = set()
        doa_shares = set()
        times = {}
        
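        # checks a solution returned by a miner and submits it as a bitcoin
        # block, merged-mining proof-of-work, and/or p2pool share as appropriate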
        def got_response(header, request):
            try:
                user = worker_interface.get_username(request)
                # match up with transactions
                xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
                if xxx is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                share_info, transactions = xxx
                
                hash_ = bitcoin_data.block_header_type.hash256(header)
                
                pow_hash = net.BITCOIN_POW_FUNC(header)
                
                if pow_hash <= header['target'] or p2pool.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                    else:
                        print 'No bitcoind connection! Erp!'
                    if pow_hash <= header['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                
                if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
                    try:
                        aux_pow = dict(
                            merkle_tx=dict(
                                tx=transactions[0],
                                block_hash=hash_,
                                merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
                                index=0,
                            ),
                            merkle_branch=[],
                            index=0,
                            parent_block_header=header,
                        )
                        
                        a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
                        #print a, b
                        merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                        def _(res):
                            print "MERGED RESULT:", res
                        merged.rpc_getauxblock(a, b).addBoth(_)
                    except:
                        log.err(None, 'Error while processing merged mining POW:')
                
                target = share_info['target']
                if pow_hash > target:
                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
                    return False
                share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
                my_shares.add(share.hash)
                if share.previous_hash != current_work.value['best_share_hash']:
                    doa_shares.add(share.hash)
                print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(compute, got_response, current_work.changed)
        
        def get_rate():
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], net, min(height - 1, 720))
                fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
                return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
            return json.dumps(None)
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net)] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type):
                resource.Resource.__init__(self) # Resource is an old-style class, so its __init__ must be called explicitly
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        # do new getwork when a block is heard on the p2p interface
        
        def new_block(block_hash):
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def work1_thread():
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
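        # watchdog: re-arm a 30-second SIGALRM every second so that if the
        # reactor stalls for 30 seconds, a stack trace is printed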
        if hasattr(signal, 'SIGALRM'):
            def watchdog_handler(signum, frame):
                print 'Watchdog timer went off at:'
                traceback.print_stack()
            
            signal.signal(signal.SIGALRM, watchdog_handler)
            task.LoopingCall(signal.alarm, 30).start(1)
        
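        # status loop: periodically print pool statistics when they change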
        pool_str = None
        while True:
            yield deferral.sleep(3)
            try:
                if time.time() > current_work2.value['last_update'] + 60:
                    print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], net, min(height - 1, 720))
                        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                        shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
                        stale_shares = stale_doa_shares + stale_not_doa_shares
                        fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
                        new_pool_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
                            math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
                            height,
                            len(tracker.verified.shares),
                            len(tracker.shares),
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
                            shares,
                            stale_not_doa_shares,
                            stale_doa_shares,
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                        if new_pool_str != pool_str:
                            print new_pool_str
                            pool_str = new_pool_str
                        if fracs:
                            med = math.median(fracs)
                            print 'Median stale proportion:', med
                            if shares:
                                print '    Own:', stale_shares/shares
                                if med < .99:
                                    print '    Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
    finally:
        reactor.stop()

def run():
    class FixedArgumentParser(argparse.ArgumentParser):
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            return [arg for arg in arg_line.split() if arg.strip()]
    
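    # fromfile_prefix_chars='@' lets '@file' arguments be expanded from the
    # named file, with FixedArgumentParser allowing several arguments per line
    # (e.g., hypothetically, 'run_p2pool.py @p2pool.conf' would read defaults
    # from a p2pool.conf file)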
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='percentage amount to donate to author of p2pool. Default: 0.5',
        type=float, action='store', default=0.5, dest='donation_percentage')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to default p2pool P2P port), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward p2pool P2P port from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee . default: 0''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getmemorypool (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_RPC_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
        help='bitcoind RPC interface username (default: empty)',
        type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    if args.logfile is None:
        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '.log')
    
    class LogFile(object):
        def __init__(self, filename):
            self.filename = filename
            self.inner_file = None
            self.reopen()
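        # reopens the log file, truncating it to roughly its final megabyte
        # once it grows past 100 MB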
        def reopen(self):
            if self.inner_file is not None:
                self.inner_file.close()
            open(self.filename, 'a').close()
            f = open(self.filename, 'rb')
            f.seek(0, os.SEEK_END)
            length = f.tell()
            if length > 100*1000*1000:
                f.seek(-1000*1000, os.SEEK_END)
                while True:
                    if f.read(1) in ('', '\n'):
                        break
                data = f.read()
                f.close()
                f = open(self.filename, 'wb')
                f.write(data)
            f.close()
            self.inner_file = open(self.filename, 'a')
        def write(self, data):
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
    class TeePipe(object):
        def __init__(self, outputs):
            self.outputs = outputs
        def write(self, data):
            for output in self.outputs:
                output.write(data)
        def flush(self):
            for output in self.outputs:
                output.flush()
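    # file-like wrapper that prefixes each written line with a timestamp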
    class TimestampingPipe(object):
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.buf = ''
            self.softspace = 0
        def write(self, data):
            buf = self.buf + data
            lines = buf.split('\n')
            for line in lines[:-1]:
                self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                self.inner_file.flush()
            self.buf = lines[-1]
        def flush(self):
            pass
    logfile = LogFile(args.logfile)
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.BITCOIN_RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    if (args.merged_url is None) ^ (args.merged_userpass is None):
        parser.error('must specify both --merged-url and --merged-userpass, or neither')
    
    reactor.callWhenRunning(main, args, net)
    reactor.run()