regularized naming of bitcoin imports in p2pool.main
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
24 from bitcoin import worker_interface
25 from util import db, expiring_dict, jsonrpc, variable, deferral, math
26 from . import p2p, skiplists
27 import p2pool, p2pool.data as p2pool_data
28
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind, ht, net):
    """Fetch a block template from bitcoind (rpc_getmemorypool) and
    normalize it into a plain dict: previous hash as an int, raw
    transactions unpacked, and 'bits' converted to a FloatingInteger.

    Retried up to 3 times by the deferral.retry decorator.
    """
    work = yield bitcoind.rpc_getmemorypool()
    # 'bits' arrives either hex-encoded (older bitcoinds) or as a packed
    # integer; normalize both forms to a FloatingInteger.
    bits = work['bits']
    if isinstance(bits, (str, unicode)):
        target = bitcoin_data.FloatingIntegerType().unpack(bits.decode('hex')[::-1])
    else:
        target = bitcoin_data.FloatingInteger(bits)
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=[bitcoin_data.tx_type.unpack(raw_tx.decode('hex')) for raw_tx in work['transactions']],
        subsidy=work['coinbasevalue'],
        time=work['time'],
        target=target,
    ))
41
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Ask the connected bitcoind for a payout script via a null
    checkorder message.

    Returns the script on 'success', None on 'denied', and raises
    ValueError for any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin_p2p.Protocol.null_order)
    reply = res['reply']
    # Guard clauses: handle the two known replies, reject anything else.
    if reply == 'denied':
        defer.returnValue(None)
    if reply != 'success':
        raise ValueError('Unexpected reply: %r' % (res,))
    defer.returnValue(res['script'])
52
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: derive one from the address of
    bitcoind's 'p2pool' account.

    Retried up to 10 times by the deferral.retry decorator.
    """
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin_data.pubkey_hash_to_script2(pubkey_hash))
57
58 @defer.inlineCallbacks
59 def main(args):
60     try:
61         print 'p2pool (version %s)' % (p2pool.__version__,)
62         print
63         try:
64             from . import draw
65         except ImportError:
66             draw = None
67             print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
68             print
69         
70         # connect to bitcoind over JSON-RPC and do initial getwork
71         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
72         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
73         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
74         good = yield deferral.retry('Error while checking bitcoind identity:', 1)(args.net.BITCOIN_RPC_CHECK)(bitcoind)
75         if not good:
76             print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
77             return
78         temp_work = yield deferral.retry('Error while testing getwork:', 1)(defer.inlineCallbacks(lambda: defer.returnValue(bitcoin_getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork())))))()
79         print '    ...success!'
80         print '    Current block hash: %x' % (temp_work.previous_block,)
81         print
82         
83         # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
84         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
85         factory = bitcoin_p2p.ClientFactory(args.net)
86         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
87         my_script = yield get_payout_script(factory)
88         if args.pubkey_hash is None:
89             if my_script is None:
90                 print '    IP transaction denied ... falling back to sending to address.'
91                 my_script = yield get_payout_script2(bitcoind, args.net)
92         else:
93             my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
94         print '    ...success!'
95         print '    Payout script:', bitcoin_data.script2_to_human(my_script, args.net)
96         print
97         
98         print 'Loading cached block headers...'
99         ht = bitcoin_p2p.HeightTracker(factory, args.net.NAME + '_headers.dat')
100         print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
101         print
102         
103         tracker = p2pool_data.OkayTracker(args.net)
104         shared_share_hashes = set()
105         ss = p2pool_data.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.NAME + '_shares.'), args.net)
106         known_verified = set()
107         print "Loading shares..."
108         for i, (mode, contents) in enumerate(ss.get_shares()):
109             if mode == 'share':
110                 if contents.hash in tracker.shares:
111                     continue
112                 shared_share_hashes.add(contents.hash)
113                 contents.time_seen = 0
114                 tracker.add(contents)
115                 if len(tracker.shares) % 1000 == 0 and tracker.shares:
116                     print "    %i" % (len(tracker.shares),)
117             elif mode == 'verified_hash':
118                 known_verified.add(contents)
119             else:
120                 raise AssertionError()
121         print "    ...inserting %i verified shares..." % (len(known_verified),)
122         for h in known_verified:
123             if h not in tracker.shares:
124                 ss.forget_verified_share(h)
125                 continue
126             tracker.verified.add(tracker.shares[h])
127         print "    ...done loading %i shares!" % (len(tracker.shares),)
128         print
129         tracker.added.watch(lambda share: ss.add_share(share))
130         tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
131         tracker.removed.watch(lambda share: ss.forget_share(share.hash))
132         tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
133         tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
134         
135         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
136         
137         # information affecting work that should trigger a long-polling update
138         current_work = variable.Variable(None)
139         # information affecting work that should not trigger a long-polling update
140         current_work2 = variable.Variable(None)
141         
142         work_updated = variable.Event()
143         
144         requested = expiring_dict.ExpiringDict(300)
145         
146         @defer.inlineCallbacks
147         def set_real_work1():
148             work = yield getwork(bitcoind, ht, args.net)
149             changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
150             current_work.set(dict(
151                 version=work['version'],
152                 previous_block=work['previous_block_hash'],
153                 target=work['target'],
154                 best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
155                 aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
156             ))
157             current_work2.set(dict(
158                 time=work['time'],
159                 transactions=work['transactions'],
160                 subsidy=work['subsidy'],
161                 clock_offset=time.time() - work['time'],
162                 last_update=time.time(),
163             ))
164             if changed:
165                 set_real_work2()
166         
167         def set_real_work2():
168             best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
169             
170             t = dict(current_work.value)
171             t['best_share_hash'] = best
172             current_work.set(t)
173             
174             t = time.time()
175             for peer2, share_hash in desired:
176                 if share_hash not in tracker.tails: # was received in the time tracker.think was running
177                     continue
178                 last_request_time, count = requested.get(share_hash, (None, 0))
179                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
180                     continue
181                 potential_peers = set()
182                 for head in tracker.tails[share_hash]:
183                     potential_peers.update(peer_heads.get(head, set()))
184                 potential_peers = [peer for peer in potential_peers if peer.connected2]
185                 if count == 0 and peer2 is not None and peer2.connected2:
186                     peer = peer2
187                 else:
188                     peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
189                     if peer is None:
190                         continue
191                 
192                 print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
193                 peer.send_getshares(
194                     hashes=[share_hash],
195                     parents=2000,
196                     stops=list(set(tracker.heads) | set(
197                         tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
198                     ))[:100],
199                 )
200                 requested[share_hash] = t, count + 1
201         
202         print 'Initializing work...'
203         yield set_real_work1()
204         set_real_work2()
205         print '    ...success!'
206         print
207         
208         @defer.inlineCallbacks
209         def set_merged_work():
210             if not args.merged_url:
211                 return
212             while True:
213                 merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
214                 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
215                 x = dict(current_work.value)
216                 x['aux_work'] = dict(
217                     hash=int(auxblock['hash'], 16),
218                     target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
219                     chain_id=auxblock['chainid'],
220                 )
221                 #print x['aux_work']
222                 current_work.set(x)
223                 yield deferral.sleep(1)
224         set_merged_work()
225         
226         start_time = time.time() - current_work2.value['clock_offset']
227         
228         # setup p2p logic and join p2pool network
229         
230         def share_share(share, ignore_peer=None):
231             for peer in p2p_node.peers.itervalues():
232                 if peer is ignore_peer:
233                     continue
234                 #if p2pool.DEBUG:
235                 #    print "Sending share %s to %r" % (p2pool_data.format_hash(share.hash), peer.addr)
236                 peer.sendShares([share])
237             shared_share_hashes.add(share.hash)
238         
239         def p2p_shares(shares, peer=None):
240             if len(shares) > 5:
241                 print 'Processing %i shares...' % (len(shares),)
242             
243             new_count = 0
244             for share in shares:
245                 if share.hash in tracker.shares:
246                     #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
247                     continue
248                 
249                 new_count += 1
250                 
251                 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
252                 
253                 tracker.add(share)
254             
255             if shares and peer is not None:
256                 peer_heads.setdefault(shares[0].hash, set()).add(peer)
257             
258             if new_count:
259                 set_real_work2()
260             
261             if len(shares) > 5:
262                 print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*args.net.CHAIN_LENGTH)
263         
264         @tracker.verified.added.watch
265         def _(share):
266             if share.pow_hash <= share.header['target']:
267                 if factory.conn.value is not None:
268                     factory.conn.value.send_block(block=share.as_block(tracker, args.net))
269                 else:
270                     print 'No bitcoind connection! Erp!'
271                 print
272                 print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash,)
273                 print
274         
275         def p2p_share_hashes(share_hashes, peer):
276             t = time.time()
277             get_hashes = []
278             for share_hash in share_hashes:
279                 if share_hash in tracker.shares:
280                     continue
281                 last_request_time, count = requested.get(share_hash, (None, 0))
282                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
283                     continue
284                 print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
285                 get_hashes.append(share_hash)
286                 requested[share_hash] = t, count + 1
287             
288             if share_hashes and peer is not None:
289                 peer_heads.setdefault(share_hashes[0], set()).add(peer)
290             if get_hashes:
291                 peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
292         
293         def p2p_get_shares(share_hashes, parents, stops, peer):
294             parents = min(parents, 1000//len(share_hashes))
295             stops = set(stops)
296             shares = []
297             for share_hash in share_hashes:
298                 for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
299                     if share.hash in stops:
300                         break
301                     shares.append(share)
302             print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
303             peer.sendShares(shares, full=True)
304         
305         print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
306         
307         def parse(x):
308             if ':' in x:
309                 ip, port = x.split(':')
310                 return ip, int(port)
311             else:
312                 return x, args.net.P2P_PORT
313         
314         nodes = set([
315             ('72.14.191.28', args.net.P2P_PORT),
316             ('62.204.197.159', args.net.P2P_PORT),
317             ('142.58.248.28', args.net.P2P_PORT),
318             ('94.23.34.145', args.net.P2P_PORT),
319         ])
320         for host in [
321             'p2pool.forre.st',
322             'dabuttonfactory.com',
323         ]:
324             try:
325                 nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
326             except:
327                 log.err(None, 'Error resolving bootstrap node IP:')
328
329         if args.net_name == 'litecoin':
330             nodes.add(((yield reactor.resolve('liteco.in')), args.net.P2P_PORT))
331         
332         p2p_node = p2p.Node(
333             current_work=current_work,
334             port=args.p2pool_port,
335             net=args.net,
336             addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.NAME),
337             mode=0 if args.low_bandwidth else 1,
338             preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
339         )
340         p2p_node.handle_shares = p2p_shares
341         p2p_node.handle_share_hashes = p2p_share_hashes
342         p2p_node.handle_get_shares = p2p_get_shares
343         
344         p2p_node.start()
345         
346         # send share when the chain changes to their chain
347         def work_changed(new_work):
348             #print 'Work changed:', new_work
349             for share in tracker.get_chain_known(new_work['best_share_hash']):
350                 if share.hash in shared_share_hashes:
351                     break
352                 share_share(share, share.peer)
353         current_work.changed.watch(work_changed)
354         
355         print '    ...success!'
356         print
357         
358         @defer.inlineCallbacks
359         def upnp_thread():
360             while True:
361                 try:
362                     is_lan, lan_ip = yield ipdiscover.get_local_ip()
363                     if is_lan:
364                         pm = yield portmapper.get_port_mapper()
365                         yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
366                 except defer.TimeoutError:
367                     pass
368                 except:
369                     if p2pool.DEBUG:
370                         log.err(None, "UPnP error:")
371                 yield deferral.sleep(random.expovariate(1/120))
372         
373         if args.upnp:
374             upnp_thread()
375         
376         # start listening for workers with a JSON-RPC server
377         
378         print 'Listening for workers on port %i...' % (args.worker_port,)
379         
380         # setup worker logic
381         
382         merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
383         run_identifier = struct.pack('<I', random.randrange(2**32))
384         
385         share_counter = skiplists.CountsSkipList(tracker, run_identifier)
386         removed_unstales = set()
387         def get_share_counts(doa=False):
388             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
389             matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
390             shares_in_chain = my_shares & matching_in_chain
391             stale_shares = my_shares - matching_in_chain
392             if doa:
393                 stale_doa_shares = stale_shares & doa_shares
394                 stale_not_doa_shares = stale_shares - stale_doa_shares
395                 return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
396             return len(shares_in_chain) + len(stale_shares), len(stale_shares)
397         @tracker.verified.removed.watch
398         def _(share):
399             if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
400                 removed_unstales.add(share.hash)
401         
402         
403         def get_payout_script_from_username(request):
404             user = worker_interface.get_username(request)
405             if user is None:
406                 return None
407             try:
408                 return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, args.net))
409             except: # XXX blah
410                 return None
411         
412         def compute(request):
413             state = current_work.value
414             payout_script = get_payout_script_from_username(request)
415             if payout_script is None or random.uniform(0, 100) < args.worker_fee:
416                 payout_script = my_script
417             if state['best_share_hash'] is None and args.net.PERSIST:
418                 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
419             if len(p2p_node.peers) == 0 and args.net.PERSIST:
420                 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
421             if time.time() > current_work2.value['last_update'] + 60:
422                 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
423             
424             if state['aux_work'] is not None:
425                 aux_str = '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
426             else:
427                 aux_str = ''
428             
429             # XXX assuming generate_tx is smallish here..
430             def get_stale_frac():
431                 shares, stale_shares = get_share_counts()
432                 if shares == 0:
433                     return ""
434                 frac = stale_shares/shares
435                 return 2*struct.pack('<H', int(65535*frac + .5))
436             subsidy = current_work2.value['subsidy']
437             
438             
439             timestamp = current_work2.value['time']
440             previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
441             new_share_info, generate_tx = p2pool_data.new_generate_transaction(
442                 tracker=tracker,
443                 new_share_data=dict(
444                     previous_share_hash=state['best_share_hash'],
445                     coinbase=aux_str,
446                     nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
447                     new_script=payout_script,
448                     subsidy=subsidy,
449                     donation=math.perfect_round(65535*args.donation_percentage/100),
450                     stale_frac=(lambda shares, stales:
451                         255 if shares == 0 else math.perfect_round(254*stales/shares)
452                     )(*get_share_counts()),
453                 ),
454                 block_target=state['target'],
455                 desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
456                 net=args.net,
457             )
458             
459             print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin_data.target_to_difficulty(new_share_info['target']), (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) -subsidy//200)*1e-8, args.net.BITCOIN_SYMBOL, subsidy*1e-8, args.net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
460             #print 'Target: %x' % (p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
461             #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
462             transactions = [generate_tx] + list(current_work2.value['transactions'])
463             merkle_root = bitcoin_data.merkle_hash(transactions)
464             merkle_root_to_transactions[merkle_root] = new_share_info, transactions
465             
466             target2 = new_share_info['target']
467             times[merkle_root] = time.time()
468             #print 'SENT', 2**256//p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
469             return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2), state['best_share_hash']
470         
471         my_shares = set()
472         doa_shares = set()
473         times = {}
474         
475         def got_response(header, request):
476             try:
477                 user = worker_interface.get_username(request)
478                 # match up with transactions
479                 xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
480                 if xxx is None:
481                     print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
482                     return False
483                 share_info, transactions = xxx
484                 new_share_info = share_info
485                 
486                 hash_ = bitcoin_data.block_header_type.hash256(header)
487                 
488                 pow_hash = args.net.BITCOIN_POW_FUNC(header)
489                 
490                 if pow_hash <= header['target'] or p2pool.DEBUG:
491                     if factory.conn.value is not None:
492                         factory.conn.value.send_block(block=dict(header=header, txs=transactions))
493                     else:
494                         print 'No bitcoind connection! Erp!'
495                     if pow_hash <= header['target']:
496                         print
497                         print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
498                         print
499                 
500                 if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
501                     try:
502                         aux_pow = dict(
503                             merkle_tx=dict(
504                                 tx=transactions[0],
505                                 block_hash=hash_,
506                                 merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
507                                 index=0,
508                             ),
509                             merkle_branch=[],
510                             index=0,
511                             parent_block_header=header,
512                         )
513                         
514                         a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
515                         #print a, b
516                         merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
517                         def _(res):
518                             print "MERGED RESULT:", res
519                         merged.rpc_getauxblock(a, b).addBoth(_)
520                     except:
521                         log.err(None, 'Error while processing merged mining POW:')
522                 
523                 target = new_share_info['target']
524                 if pow_hash > target:
525                     print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
526                     return False
527                 share = p2pool_data.NewShare(args.net, header, new_share_info, other_txs=transactions[1:])
528                 my_shares.add(share.hash)
529                 if share.previous_hash != current_work.value['best_share_hash']:
530                     doa_shares.add(share.hash)
531                 print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
532                 good = share.previous_hash == current_work.value['best_share_hash']
533                 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
534                 p2p_shares([share])
535                 # eg. good = share.hash == current_work.value['best_share_hash'] here
536                 return good
537             except:
538                 log.err(None, 'Error processing data received from worker:')
539                 return False
540         
541         web_root = worker_interface.WorkerInterface(compute, got_response, current_work.changed)
542         
543         def get_rate():
544             if current_work.value['best_share_hash'] is not None:
545                 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
546                 att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
547                 fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
548                 return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
549             return json.dumps(None)
550         
551         def get_users():
552             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
553             weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
554             res = {}
555             for script in sorted(weights, key=lambda s: weights[s]):
556                 res[bitcoin_data.script2_to_human(script, args.net)] = weights[script]/total_weight
557             return json.dumps(res)
558         
559         class WebInterface(resource.Resource):
560             def __init__(self, func, mime_type):
561                 self.func, self.mime_type = func, mime_type
562             
563             def render_GET(self, request):
564                 request.setHeader('Content-Type', self.mime_type)
565                 return self.func()
566         
567         web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
568         web_root.putChild('users', WebInterface(get_users, 'application/json'))
569         web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
570         if draw is not None:
571             web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
572         
573         reactor.listenTCP(args.worker_port, server.Site(web_root))
574         
575         print '    ...success!'
576         print
577         
578         # done!
579         
580         # do new getwork when a block is heard on the p2p interface
581         
582         def new_block(block_hash):
583             work_updated.happened()
584         factory.new_block.watch(new_block)
585         
586         print 'Started successfully!'
587         print
588         
589         ht.updated.watch(set_real_work2)
590         
591         @defer.inlineCallbacks
592         def work1_thread():
593             while True:
594                 flag = work_updated.get_deferred()
595                 try:
596                     yield set_real_work1()
597                 except:
598                     log.err()
599                 yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
600         
601         @defer.inlineCallbacks
602         def work2_thread():
603             while True:
604                 try:
605                     set_real_work2()
606                 except:
607                     log.err()
608                 yield deferral.sleep(random.expovariate(1/20))
609         
610         work1_thread()
611         work2_thread()
612         
613         
614         if hasattr(signal, 'SIGALRM'):
615             def watchdog_handler(signum, frame):
616                 print 'Watchdog timer went off at:'
617                 traceback.print_stack()
618             
619             signal.signal(signal.SIGALRM, watchdog_handler)
620             task.LoopingCall(signal.alarm, 30).start(1)
621         
622         
623         pool_str = None;
624         while True:
625             yield deferral.sleep(3)
626             try:
627                 if time.time() > current_work2.value['last_update'] + 60:
628                     print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
629                 if current_work.value['best_share_hash'] is not None:
630                     height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
631                     if height > 2:
632                         att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
633                         weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
634                         shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
635                         stale_shares = stale_doa_shares + stale_not_doa_shares
636                         fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
637                         str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
638                             math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
639                             height,
640                             len(tracker.verified.shares),
641                             len(tracker.shares),
642                             weights.get(my_script, 0)/total_weight*100,
643                             math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
644                             shares,
645                             stale_not_doa_shares,
646                             stale_doa_shares,
647                             len(p2p_node.peers),
648                         ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
649                         if (str != pool_str):
650                             print str;
651                             pool_str = str;
652                         if fracs:
653                             med = math.median(fracs)
654                             print 'Median stale proportion:', med
655                             if shares:
656                                 print '    Own:', stale_shares/shares
657                                 if med < .99:
658                                     print '    Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
659             
660             
661             except:
662                 log.err()
663     except:
664         log.err(None, 'Fatal error:')
665     finally:
666         reactor.stop()
667
def run():
    """Process entry point: parse command-line arguments, install logging and
    signal handlers, resolve per-network defaults, then start the twisted
    reactor with main(). Does not return until the reactor stops.
    """
    class FixedArgumentParser(argparse.ArgumentParser):
        # ArgumentParser subclass that fixes @file argument expansion:
        # @files are expanded recursively (an @file may reference other
        # @files) and each line may hold several whitespace-separated
        # arguments instead of exactly one.
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            # recurse so @files referenced from within an
                            # @file are expanded too
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        # report unreadable @file via the parser's usual
                        # error path (prints usage and exits)
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            # split a config-file line on whitespace, dropping empty tokens,
            # so one line can carry multiple arguments
            return [arg for arg in arg_line.split() if arg.strip()]
    
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(x for x in p2pool_data.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='percentage amount to donate to author of p2pool. Default: 0.5',
        type=float, action='store', default=0.5, dest='donation_percentage')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: bitcoin: 9332 namecoin: 9331 ixcoin: 9330 i0coin: 9329 solidcoin: 9328 litecoin: 9327, +10000 for testnets)',
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:9332/fee . default: 0''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332 ixcoin: 8338 i0coin: 7332 litecoin: 9332)',
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 namecoin: 8334 ixcoin: 8337 i0coin: 7333 solidcoin: 7555 litecoin: 9333, +10000 for testnets)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # positional arguments: RPC username (optional, defaults to empty) and
    # RPC password (required)
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
        help='bitcoind RPC interface username (default: empty)',
        type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    # default logfile: <network_name>[_testnet].log next to the launch script
    if args.logfile is None:
        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), args.net_name + ('_testnet' if args.testnet else '') + '.log')
    
    class LogFile(object):
        # Append-only log file that can be reopened at runtime (used by the
        # SIGUSR1 handler and the periodic LoopingCall below, so external
        # rotation/deletion is picked up) and that self-truncates to roughly
        # the last 1MB of whole lines once it exceeds 100MB.
        def __init__(self, filename):
            self.filename = filename
            self.inner_file = None
            self.reopen()
        def reopen(self):
            if self.inner_file is not None:
                self.inner_file.close()
            open(self.filename, 'a').close()  # ensure the file exists before reading
            f = open(self.filename, 'rb')
            f.seek(0, os.SEEK_END)
            length = f.tell()
            if length > 100*1000*1000:
                # keep only the trailing ~1MB; advance to the next newline
                # (or EOF) so the kept data starts on a line boundary
                f.seek(-1000*1000, os.SEEK_END)
                while True:
                    if f.read(1) in ('', '\n'):
                        break
                data = f.read()
                f.close()
                f = open(self.filename, 'wb')
                f.write(data)
            f.close()
            self.inner_file = open(self.filename, 'a')
        def write(self, data):
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
    class TeePipe(object):
        # File-like object that duplicates writes to several outputs
        # (here: the real stderr and the logfile).
        def __init__(self, outputs):
            self.outputs = outputs
        def write(self, data):
            for output in self.outputs:
                output.write(data)
        def flush(self):
            for output in self.outputs:
                output.flush()
    class TimestampingPipe(object):
        # File-like wrapper that prefixes each complete line with a
        # HH:MM:SS.microseconds timestamp; partial lines are buffered in
        # self.buf until their newline arrives.
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.buf = ''
            self.softspace = 0  # required by Python 2's print statement protocol
        def write(self, data):
            buf = self.buf + data
            lines = buf.split('\n')
            for line in lines[:-1]:
                self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                self.inner_file.flush()
            self.buf = lines[-1]
        def flush(self):
            pass
    logfile = LogFile(args.logfile)
    # redirect stdout, stderr and twisted's log observer through the
    # timestamping tee so everything lands in both the console and the logfile
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    if hasattr(signal, "SIGUSR1"):
        # SIGUSR1 reopens the logfile, allowing external log rotation
        # (not available on Windows, hence the hasattr guard)
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    # also reopen periodically so rotation works even without SIGUSR1
    task.LoopingCall(logfile.reopen).start(5)
    
    # resolve the network definition object from name (+ optional testnet)
    args.net = p2pool_data.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    # fill in any ports the user did not specify from the network's defaults
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = args.net.BITCOIN_RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = args.net.WORKER_PORT
    
    # convert the payout address to a pubkey hash now so a bad address is
    # reported before any networking starts; None means "ask bitcoind later"
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    # merged mining needs both the url and the credentials; XOR catches
    # the case where only one of the two was given
    if (args.merged_url is None) ^ (args.merged_userpass is None):
        parser.error('must specify --merged-url and --merged-userpass')
    
    reactor.callWhenRunning(main, args)
    reactor.run()