moved read_stale_frac to Share classes
[p2pool.git] / p2pool / main.py
#!/usr/bin/python

from __future__ import division

import argparse
import datetime
import itertools
import os
import random
import sqlite3
import struct
import sys
import time
import json
import signal
import traceback

from twisted.internet import defer, reactor, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p, bitcoin.getwork, bitcoin.data
from util import db, expiring_dict, jsonrpc, variable, deferral, math
from . import p2p, worker_interface, skiplists
import p2pool.data as p2pool
import p2pool as p2pool_init

@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind, ht, net):
    work = yield bitcoind.rpc_getmemorypool()
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=[bitcoin.data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
        subsidy=work['coinbasevalue'],
        time=work['time'],
        target=bitcoin.data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin.data.FloatingInteger(work['bits']),
    ))

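# Note: 'bits' in the getmemorypool response is Bitcoin's compact target
# encoding: an exponent byte followed by a 3-byte mantissa. For example,
# bits 0x1b0404cb encodes the target
# 0x0404cb * 2**(8*(0x1b - 3)) =
# 0x00000000000404cb000000000000000000000000000000000000000000000000.
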
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
    if res['reply'] == 'success':
        defer.returnValue(res['script'])
    elif res['reply'] == 'denied':
        defer.returnValue(None)
    else:
        raise ValueError('Unexpected reply: %r' % (res,))

@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(bitcoin.data.address_to_pubkey_hash((yield bitcoind.rpc_getaccountaddress('p2pool')), net)))

@defer.inlineCallbacks
def main(args):
    try:
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(args.net.BITCOIN_RPC_CHECK)(bitcoind)
        if not good:
            print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield deferral.retry('Error while testing getwork:', 1)(defer.inlineCallbacks(lambda: defer.returnValue(bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork())))))()
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work.previous_block,)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        if args.pubkey_hash is None:
            if my_script is None:
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', bitcoin.data.script2_to_human(my_script, args.net)
        print
        
        print 'Loading cached block headers...'
        ht = bitcoin.p2p.HeightTracker(factory, args.net.NAME + '_headers.dat')
        print '    ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
        print
        
        tracker = p2pool.OkayTracker(args.net)
        shared_share_hashes = set()
        ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.NAME + '_shares.'), args.net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.added.watch(lambda share: ss.add_share(share))
        tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind, ht, args.net)
            changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                target=work['target'],
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
                aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            ))
            if changed:
                set_real_work2()
        
        def set_real_work2():
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
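        # Note on the throttle above: a missing parent share is re-requested
        # only after a window that grows as 10 * 1.5**count seconds
        # (10s, 15s, 22.5s, ...), so hashes that never arrive get polled
        # less and less often instead of being hammered.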

        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def set_merged_work():
            if not args.merged_url:
                return
            while True:
                merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
                x = dict(current_work.value)
                x['aux_work'] = dict(
                    hash=int(auxblock['hash'], 16),
                    target=bitcoin.data.HashType().unpack(auxblock['target'].decode('hex')),
                    chain_id=auxblock['chainid'],
                )
                #print x['aux_work']
                current_work.set(x)
                yield deferral.sleep(1)
        set_merged_work()
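        # set_merged_work polls the merged daemon's getauxblock about once a
        # second and folds the aux chain's hash/target into current_work,
        # which triggers a long-polling update for miners.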

        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.sendShares([share])
            shared_share_hashes.add(share.hash)
        
        def p2p_shares(shares, peer=None):
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            new_count = 0
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                
                new_count += 1
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
            
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if new_count:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['target']:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                else:
                    print 'No bitcoind connection! Erp!'
                print
                print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.header_hash,)
                print
        
        def p2p_share_hashes(share_hashes, peer):
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            if not share_hashes:
                return # avoid dividing by zero below on an empty request
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
            peer.sendShares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
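        # e.g. parse('1.2.3.4:9334') -> ('1.2.3.4', 9334)
        #      parse('1.2.3.4') -> ('1.2.3.4', args.net.P2P_PORT)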

        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        if args.net_name == 'litecoin':
            nodes.add(((yield reactor.resolve('liteco.in')), args.net.P2P_PORT))
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.NAME),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # when the best share chain changes, broadcast the new shares, stopping
        # at the first one that has already been sent to the network
        def work_changed(new_work):
            #print 'Work changed:', new_work
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.hash in shared_share_hashes:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') # XXX try to forward external correct port?
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool_init.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
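        # (random.expovariate(1/120) sleeps for an exponentially distributed
        # interval averaging ~120 seconds between UPnP refreshes)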

        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        run_identifier = struct.pack('<I', random.randrange(2**32))
        
        share_counter = skiplists.CountsSkipList(tracker, run_identifier)
        removed_unstales = set()
        def get_share_counts(doa=False):
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
            shares_in_chain = my_shares & matching_in_chain
            stale_shares = my_shares - matching_in_chain
            if doa:
                stale_doa_shares = stale_shares & doa_shares
                stale_not_doa_shares = stale_shares - stale_doa_shares
                return len(shares_in_chain) + len(stale_shares), len(stale_doa_shares), len(stale_not_doa_shares)
            return len(shares_in_chain) + len(stale_shares), len(stale_shares)
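        # Shares of ours that appear in the current chain are live; the rest
        # are stale. The watcher below records pruned chain-resident shares in
        # removed_unstales so they aren't misclassified as stale later.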
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_unstales.add(share.hash)
        
        def compute(state, payout_script):
            if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            if len(p2p_node.peers) == 0 and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
            if time.time() > current_work2.value['last_update'] + 60:
                raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
            
            if state['aux_work'] is not None:
                aux_str = '\xfa\xbemm' + bitcoin.data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
            else:
                aux_str = ''
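            # aux_str is the merged-mining tag embedded in the coinbase: the
            # magic bytes '\xfa\xbemm', the aux block hash (byte-reversed),
            # then struct.pack('<ii', 1, 0) for a size-1 aux merkle tree and
            # nonce 0.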
            
            # XXX assuming generate_tx is smallish here..
            def get_stale_frac():
                shares, stale_shares = get_share_counts()
                if shares == 0:
                    return ""
                frac = stale_shares/shares
                return 2*struct.pack('<H', int(65535*frac + .5))
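            # e.g. a stale fraction of 0.25 packs as
            # int(65535*0.25 + .5) == 16384 -> '\x00\x40', included twice
            # ('\x00\x40\x00\x40'), presumably so a reader can locate the
            # field reliably within the coinbase.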
            subsidy = current_work2.value['subsidy']
            
            if int(time.time() - current_work2.value['clock_offset']) >= p2pool.TRANSITION_TIME:
                timestamp = current_work2.value['time']
                is_new = True
                previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
                new_share_info, generate_tx = p2pool.new_generate_transaction(
                    tracker=tracker,
                    new_share_data=dict(
                        previous_share_hash=state['best_share_hash'],
                        coinbase=aux_str,
                        nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                        new_script=payout_script,
                        subsidy=subsidy,
                        donation=math.perfect_round(65535*args.donation_percentage/100),
                        stale_frac=(lambda shares, stales:
                            255 if shares == 0 else math.perfect_round(254*stales/shares)
                        )(*get_share_counts()),
                    ),
                    block_target=state['target'],
                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                    net=args.net,
                )
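                # donation is a fixed-point fraction of 65535 (the default
                # 0.5% gives 65535*0.5/100 = 327.675 before rounding);
                # stale_frac fits in one byte: 255 flags "no shares yet",
                # otherwise perfect_round(254*stales/shares).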
            else:
                timestamp = int(time.time() - current_work2.value['clock_offset'])
                if state['best_share_hash'] is not None:
                    timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                    if timestamp2 > timestamp:
                        print 'Toff', timestamp2 - timestamp
                        timestamp = timestamp2
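                    # (the timestamp must move past the median of the previous
                    # 11 shares' timestamps, much like Bitcoin's
                    # median-time-past rule for block timestamps)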
                is_new = False
                share_info, generate_tx = p2pool.generate_transaction(
                    tracker=tracker,
                    previous_share_hash=state['best_share_hash'],
                    new_script=payout_script,
                    subsidy=subsidy,
                    nonce=run_identifier + struct.pack('<H', random.randrange(2**16)) + aux_str + get_stale_frac(),
                    block_target=state['target'],
                    net=args.net,
                )
            
            print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin.data.target_to_difficulty((new_share_info if is_new else share_info['share_data'])['target']), (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - subsidy//200)*1e-8, args.net.BITCOIN_SYMBOL, subsidy*1e-8, args.net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + list(current_work2.value['transactions'])
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = is_new, new_share_info if is_new else share_info, transactions
            
            target2 = (new_share_info if is_new else share_info['share_data'])['target']
            times[merkle_root] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
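        # The BlockAttempt carries two targets: state['target'] is the real
        # block target, target2 the much easier share target. Work meeting
        # target2 earns a share; work also meeting state['target'] is a block.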

        my_shares = set()
        doa_shares = set()
        times = {}
        
        def got_response(data, user):
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
                if xxx is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                is_new, share_info, transactions = xxx
                new_share_info = share_info
                
                hash_ = bitcoin.data.block_header_type.hash256(header)
                
                pow_hash = args.net.BITCOIN_POW_FUNC(header)
                
                if pow_hash <= header['target'] or p2pool_init.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                    else:
                        print 'No bitcoind connection! Erp!'
                    if pow_hash <= header['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                
                if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
                    try:
                        aux_pow = dict(
                            merkle_tx=dict(
                                tx=transactions[0],
                                block_hash=hash_,
                                merkle_branch=[x['hash'] for x in p2pool.calculate_merkle_branch(transactions, 0)],
                                index=0,
                            ),
                            merkle_branch=[],
                            index=0,
                            parent_block_header=header,
                        )
                        
                        a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin.data.aux_pow_type.pack(aux_pow).encode('hex')
                        #print a, b
                        merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                        def _(res):
                            print "MERGED RESULT:", res
                        merged.rpc_getauxblock(a, b).addBoth(_)
                    except:
                        log.err(None, 'Error while processing merged mining POW:')
                
                target = (new_share_info if is_new else share_info['share_data'])['target']
                if pow_hash > target:
                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
                    return False
                if is_new:
                    share = p2pool.NewShare(args.net, header, new_share_info, other_txs=transactions[1:])
                else:
                    share = p2pool.Share(args.net, header, share_info, other_txs=transactions[1:])
                my_shares.add(share.hash)
                if share.previous_hash != current_work.value['best_share_hash']:
                    doa_shares.add(share.hash)
                print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # e.g. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        def get_rate():
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
                fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
                return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
            return json.dumps(None)
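        # att_s measures only attempts that landed in the share chain, so
        # dividing by (1 - median stale fraction) scales it back up to an
        # estimate of the pool's raw hashrate.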

        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        # do new getwork when a block is heard on the p2p interface
        
        def new_block(block_hash):
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def work1_thread():
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
        if hasattr(signal, 'SIGALRM'):
            def watchdog_handler(signum, frame):
                print 'Watchdog timer went off at:'
                traceback.print_stack()
            
            signal.signal(signal.SIGALRM, watchdog_handler)
            task.LoopingCall(signal.alarm, 30).start(1)
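            # The LoopingCall re-arms a 30-second SIGALRM once per second; if
            # the reactor ever stalls long enough for the alarm to fire, the
            # handler prints a stack trace showing where it is stuck.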

        pool_str = None
        while True:
            yield deferral.sleep(3)
            try:
                if time.time() > current_work2.value['last_update'] + 60:
                    print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
                        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                        shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
                        stale_shares = stale_doa_shares + stale_not_doa_shares
                        fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
                        this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
                            math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
                            height,
                            len(tracker.verified.shares),
                            len(tracker.shares),
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
                            shares,
                            stale_not_doa_shares,
                            stale_doa_shares,
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                        if this_str != pool_str:
                            print this_str
                            pool_str = this_str
                        if fracs:
                            med = math.median(fracs)
                            print 'Median stale proportion:', med
                            if shares:
                                print '    Own:', stale_shares/shares
                                if med < .99:
                                    print '    Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
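                                    # efficiency compares own losses to the
                                    # pool median: (1 - own stale fraction) /
                                    # (1 - median stale fraction); over 100%
                                    # means losing fewer shares than typical.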
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
    finally:
        reactor.stop()

def run():
    class FixedArgumentParser(argparse.ArgumentParser):
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            return [arg for arg in arg_line.split() if arg.strip()]
    
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(x for x in p2pool.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='merged daemon username and password, separated by a colon. Example: ncuser:ncpass',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='percentage amount to donate to the author of p2pool. Default: 0.5',
        type=float, action='store', default=0.5, dest='donation_percentage')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: bitcoin: 9332 namecoin: 9331 ixcoin: 9330 i0coin: 9329 solidcoin: 9328 litecoin: 9327, +10000 for testnets)',
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:9332/fee . default: 0''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332 ixcoin: 8338 i0coin: 7332 litecoin: 9332)',
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 namecoin: 8334 ixcoin: 8337 i0coin: 7333 solidcoin: 7555 litecoin: 9333, +10000 for testnets)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
        help='bitcoind RPC interface username (default: empty)',
        type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
    
    if args.logfile is None:
        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), args.net_name + ('_testnet' if args.testnet else '') + '.log')
    
    class LogFile(object):
        def __init__(self, filename):
            self.filename = filename
            self.inner_file = None
            self.reopen()
        def reopen(self):
            if self.inner_file is not None:
                self.inner_file.close()
            open(self.filename, 'a').close()
            f = open(self.filename, 'rb')
            f.seek(0, os.SEEK_END)
            length = f.tell()
            if length > 100*1000*1000:
                f.seek(-1000*1000, os.SEEK_END)
                while True:
                    if f.read(1) in ('', '\n'):
                        break
                data = f.read()
                f.close()
                f = open(self.filename, 'wb')
                f.write(data)
            f.close()
            self.inner_file = open(self.filename, 'a')
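        # reopen() doubles as crude log rotation: once the file exceeds 100 MB
        # it is rewritten with only its final ~1 MB, starting just past a
        # newline so no partial line survives at the top.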
        def write(self, data):
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
    class TeePipe(object):
        def __init__(self, outputs):
            self.outputs = outputs
        def write(self, data):
            for output in self.outputs:
                output.write(data)
        def flush(self):
            for output in self.outputs:
                output.flush()
    class TimestampingPipe(object):
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.buf = ''
            self.softspace = 0
        def write(self, data):
            buf = self.buf + data
            lines = buf.split('\n')
            for line in lines[:-1]:
                self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                self.inner_file.flush()
            self.buf = lines[-1]
        def flush(self):
            pass
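    # TimestampingPipe keeps any trailing partial line in self.buf so each
    # completed line gets exactly one timestamp, even though print delivers
    # its text and the newline in separate write() calls.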
    logfile = LogFile(args.logfile)
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
    
    args.net = p2pool.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = args.net.BITCOIN_RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = args.net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    if (args.merged_url is None) ^ (args.merged_userpass is None):
        parser.error('must specify both --merged-url and --merged-userpass, or neither')
    
    reactor.callWhenRunning(main, args)
    reactor.run()