Protocol change scheduled for Mar 4, and Feb 26 for Litecoin
[p2pool.git] p2pool/main.py
from __future__ import division

import ConfigParser
import StringIO
import argparse
import os
import random
import struct
import sys
import time
import json
import signal
import traceback
import urlparse

from twisted.internet import defer, error, reactor, protocol, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
from . import p2p, networks, graphs
import p2pool, p2pool.data as p2pool_data

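# getwork: fetch a block template from bitcoind's getmemorypool RPC and
# normalize it into the dict used throughout this module: version, previous
# block hash, unpacked transactions, a merkle branch for the coinbase slot,
# subsidy, time, bits, and coinbase flags.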
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    try:
        work = yield bitcoind.rpc_getmemorypool()
    except jsonrpc.Error, e:
        if e.code == -32601: # Method not found
            print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
            raise deferral.RetrySilentlyException()
        raise
    packed_transactions = [x.decode('hex') for x in work['transactions']]
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
        merkle_branch=bitcoin_data.calculate_merkle_branch([0] + map(bitcoin_data.hash256, packed_transactions), 0),
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
    ))

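# main: the node's startup sequence. It verifies the bitcoind RPC and P2P
# connections, determines a payout address, loads saved shares, joins the
# p2pool network, starts the worker-facing web/JSON-RPC server, and kicks off
# the background polling, status, and announcement loops.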
@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
        if not good:
            print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        print 'Determining payout address...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            
            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            
            with open(address_path, 'wb') as f:
                f.write(address)
            
            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        else:
            my_pubkey_hash = args.pubkey_hash
        print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
        print
        
        my_share_hashes = set()
        my_doa_share_hashes = set()
        
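        # Load previously saved shares from disk into the tracker so the node
        # doesn't have to re-download the whole share chain after a restart.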
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        recent_blocks = []
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_merged_work = variable.Variable({})
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        requested = expiring_dict.ExpiringDict(300)
        
        print 'Initializing work...'
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                merkle_branch=work['merkle_branch'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # current_work2 is set first because watchers hooked on pre_current_work read it
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                bits=work['bits'],
                coinbaseflags=work['coinbaseflags'],
            ))
        yield set_real_work1()
        
        if '\ngetblock ' in (yield deferral.retry()(bitcoind.rpc_help)()):
            height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
            best_height_cached = variable.Variable((yield deferral.retry()(height_cacher)(pre_current_work.value['previous_block'])))
            def get_height_rel_highest(block_hash):
                this_height = height_cacher.call_now(block_hash, 0)
                best_height = height_cacher.call_now(pre_current_work.value['previous_block'], 0)
                best_height_cached.set(max(best_height_cached.value, this_height, best_height))
                return this_height - best_height_cached.value
        else:
            get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory, 5*net.SHARE_PERIOD*net.CHAIN_LENGTH/net.PARENT.BLOCK_PERIOD).get_height_rel_highest
        
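        # set_real_work2: pick the best share chain via tracker.think, publish
        # the combined work to current_work, and request missing parent shares
        # from peers believed to know of them. Requests are rate-limited via
        # the 'requested' expiring dict with exponential backoff.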
        def set_real_work2():
            best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['mm_chains'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # share was received while tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        pre_current_work.changed.watch(lambda _: set_real_work2())
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        set_real_work2()
        print '    ...success!'
        print
        
        
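        # set_merged_work: poll each merged-mining daemon's getauxblock about
        # once a second and fold the aux work (keyed by chain id) into
        # pre_merged_work, which retriggers set_real_work2 via its watcher.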
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.Proxy(merged_url, (merged_userpass,))
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
                pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # setup p2p logic and join p2pool network
        
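        # Node: the p2pool peer-to-peer node. handle_shares folds received
        # shares into the tracker, handle_share_hashes requests unknown shares
        # (with the same backoff as set_real_work2), and handle_get_shares
        # serves chain segments to peers, honoring their 'stops' set.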
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
                parents = min(parents, 1000//len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                peer.sendShares(shares)
        
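        # When a verified share's proof-of-work also meets the parent network's
        # block target, reassemble the full block and submit it to bitcoind
        # over the P2P connection.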
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker))
                else:
                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                print
                recent_blocks.append(dict(ts=share.timestamp, hash='%064x' % (share.header_hash,)))
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
            try:
                addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
            except:
                print >>sys.stderr, "error reading addrs"
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        p2p_node = Node(
            best_share_hash_func=lambda: current_work.value['best_share_hash'],
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
        )
        p2p_node.start()
        
        def save_addrs():
            open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
        task.LoopingCall(save_addrs).start(60)
        
        # broadcast new shares to peers as soon as the best chain changes
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        start_time = time.time()
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        
        if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
            with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
                vip_pass = f.read().strip('\r\n')
        else:
            vip_pass = '%016x' % (random.randrange(2**64),)
            with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
                f.write(vip_pass)
        print '    Worker password:', vip_pass, '(only required for generating graphs)'
        
        # setup worker logic
        
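        # Track our shares that get evicted from the tracker while still being
        # ancestors of the best chain ("unstales"). stale_info is 253 for
        # shares marked orphan, 254 for dead-on-arrival, 0 otherwise.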
        removed_unstales_var = variable.Variable((0, 0, 0))
        removed_doa_unstales_var = variable.Variable(0)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
            if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
        
        def get_stale_counts():
            '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
            my_shares = len(my_share_hashes)
            my_doa_shares = len(my_doa_share_hashes)
            delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
            my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
            my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
            orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
            doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
            
            my_shares_not_in_chain = my_shares - my_shares_in_chain
            my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
            
            return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
        
        
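        # local_rate_monitor records each pseudoshare over a 10-minute window
        # for the rate estimates below. WorkerBridge is the glue between
        # getwork-speaking miners and the share chain: preprocess_request
        # parses 'address/min_difficulty' usernames, and get_work builds a
        # share attempt for the miner.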
        local_rate_monitor = math.RateMonitor(10*60)
        
        class WorkerBridge(worker_interface.WorkerBridge):
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                self.recent_shares_ts_work = []
            
            def preprocess_request(self, request):
                user = request.getUser() if request.getUser() is not None else ''
                pubkey_hash = my_pubkey_hash
                max_target = 2**256 - 1
                if '/' in user:
                    user, min_diff_str = user.rsplit('/', 1)
                    try:
                        max_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
                    except:
                        pass
                try:
                    pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                except: # not a valid address; keep paying the node's own address
                    pass
                if random.uniform(0, 100) < args.worker_fee:
                    pubkey_hash = my_pubkey_hash
                return pubkey_hash, max_target
            
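            # get_work: refuse work while disconnected or stale, embed any
            # merged-mining commitment in the coinbase, build the generate
            # transaction (new- or old-style, depending on the scheduled
            # protocol switch at net.SWITCH_TIME), choose a pseudoshare target
            # from the miner's recent rate, and return a BlockAttempt plus the
            # got_response callback that validates submitted solutions.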
            def get_work(self, pubkey_hash, max_target):
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
                if current_work.value['mm_chains']:
                    tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
                    mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
                    mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                        merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                        size=size,
                        nonce=0,
                    ))
                    mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
                else:
                    mm_data = ''
                    mm_later = []
                
                new = time.time() > net.SWITCH_TIME
                
                if new:
                    share_info, generate_tx = p2pool_data.new_generate_transaction(
                        tracker=tracker,
                        share_data=dict(
                            previous_share_hash=current_work.value['best_share_hash'],
                            coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
                            nonce=random.randrange(2**32),
                            pubkey_hash=pubkey_hash,
                            subsidy=current_work2.value['subsidy'],
                            donation=math.perfect_round(65535*args.donation_percentage/100),
                            stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                                253 if orphans > orphans_recorded_in_chain else
                                254 if doas > doas_recorded_in_chain else
                                0
                            )(*get_stale_counts()),
                        ),
                        block_target=current_work.value['bits'].target,
                        desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                        desired_target=max_target,
                        net=net,
                    )
                else:
                    share_info, generate_tx = p2pool_data.generate_transaction(
                        tracker=tracker,
                        share_data=dict(
                            previous_share_hash=current_work.value['best_share_hash'],
                            coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
                            nonce=struct.pack('<Q', random.randrange(2**64)),
                            new_script=bitcoin_data.pubkey_hash_to_script2(pubkey_hash),
                            subsidy=current_work2.value['subsidy'],
                            donation=math.perfect_round(65535*args.donation_percentage/100),
                            stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                                253 if orphans > orphans_recorded_in_chain else
                                254 if doas > doas_recorded_in_chain else
                                0
                            )(*get_stale_counts()),
                        ),
                        block_target=current_work.value['bits'].target,
                        desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                        net=net,
                    )
                
                target = net.PARENT.SANE_MAX_TARGET
                if len(self.recent_shares_ts_work) == 50:
                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                    target = min(target, 2**256//(hash_rate * 5))
                target = max(target, share_info['bits'].target)
                for aux_work in current_work.value['mm_chains'].itervalues():
                    target = max(target, aux_work['target'])
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
                packed_generate_tx = bitcoin_data.tx_type.pack(generate_tx)
                merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(packed_generate_tx), 0, current_work2.value['merkle_branch'])
                
                getwork_time = time.time()
                merkle_branch = current_work2.value['merkle_branch']
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                ba = bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
                
                received_header_hashes = set()
                
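                # got_response handles each solution a miner submits: submit a
                # full block to bitcoind if it meets the block target, submit
                # any merged-mining proofs-of-work, convert it into a p2pool
                # share if it meets the share target, and record the
                # pseudoshare for local statistics, rejecting duplicates and
                # hashes above the handed-out target.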
                def got_response(header, request):
                    assert header['merkle_root'] == merkle_root
                    
                    header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
                    pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
                    on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                    
                    try:
                        if pow_hash <= header['bits'].target or p2pool.DEBUG:
                            @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
                            def submit_block():
                                if factory.conn.value is None:
                                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                                    raise deferral.RetrySilentlyException()
                                factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                            submit_block()
                            if pow_hash <= header['bits'].target:
                                print
                                print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                                print
                                recent_blocks.append(dict(ts=time.time(), hash='%064x' % (header_hash,)))
                    except:
                        log.err(None, 'Error while processing potential block:')
                    
                    for aux_work, index, hashes in mm_later:
                        try:
                            if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                                df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                    pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                                    bitcoin_data.aux_pow_type.pack(dict(
                                        merkle_tx=dict(
                                            tx=transactions[0],
                                            block_hash=header_hash,
                                            merkle_branch=merkle_branch,
                                            index=0,
                                        ),
                                        merkle_branch=bitcoin_data.calculate_merkle_branch(hashes, index),
                                        index=index,
                                        parent_block_header=header,
                                    )).encode('hex'),
                                )
                                @df.addCallback
                                def _(result):
                                    if result != (pow_hash <= aux_work['target']):
                                        print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                                    else:
                                        print 'Merged block submittal result: %s' % (result,)
                                @df.addErrback
                                def _(err):
                                    log.err(err, 'Error submitting merged block:')
                        except:
                            log.err(None, 'Error while processing merged mining POW:')
                    
                    if pow_hash <= share_info['bits'].target:
                        if new:
                            min_header = dict(header)
                            del min_header['merkle_root']
                            hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.gentx_before_refhash)
                            share = p2pool_data.NewShare(net, min_header, share_info, hash_link=hash_link, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                        else:
                            share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                        print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                            request.getUser(),
                            p2pool_data.format_hash(share.hash),
                            p2pool_data.format_hash(share.previous_hash),
                            time.time() - getwork_time,
                            ' DEAD ON ARRIVAL' if not on_time else '',
                        )
                        my_share_hashes.add(share.hash)
                        if not on_time:
                            my_doa_share_hashes.add(share.hash)
                        
                        tracker.add(share)
                        if not p2pool.DEBUG:
                            tracker.verified.add(share)
                        set_real_work2()
                        
                        try:
                            if pow_hash <= header['bits'].target or p2pool.DEBUG:
                                for peer in p2p_node.peers.itervalues():
                                    peer.sendShares([share])
                                shared_share_hashes.add(share.hash)
                        except:
                            log.err(None, 'Error forwarding block solution:')
                    
                    if pow_hash <= target and header_hash not in received_header_hashes:
                        reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
                        if request.getPassword() == vip_pass:
                            reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
                        self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                        while len(self.recent_shares_ts_work) > 50:
                            self.recent_shares_ts_work.pop(0)
                        local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
                    
                    if header_hash in received_header_hashes:
                        print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
                    received_header_hashes.add(header_hash)
                    
                    if pow_hash > target:
                        print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
                        print '    Hash:   %56x' % (pow_hash,)
                        print '    Target: %56x' % (target,)
                    
                    return on_time
                
                return ba, got_response
        
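        # Read-only web status endpoints. The helper functions below compute
        # pool and payout statistics from the share chain; they are wrapped as
        # Twisted resources by the WebInterface class defined further down.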
        web_root = resource.Resource()
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
        
        def get_rate():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return json.dumps(None)
            return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
            return json.dumps(res)
        
        def get_current_txouts():
            share = tracker.shares[current_work.value['best_share_hash']]
            if isinstance(share, p2pool_data.NewShare):
                share_info, gentx = p2pool_data.new_generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.share_info['bits'].target, share.net)
            else:
                share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
            return dict((out['script'], out['value']) for out in gentx['tx_outs'])
        
        def get_current_scaled_txouts(scale, trunc=0):
            txouts = get_current_txouts()
            total = sum(txouts.itervalues())
            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
            if trunc > 0:
                total_random = 0
                random_set = set()
                for s in sorted(results, key=results.__getitem__):
                    if results[s] >= trunc:
                        break
                    total_random += results[s]
                    random_set.add(s)
                if total_random:
                    winner = math.weighted_choice((script, results[script]) for script in random_set)
                    for script in random_set:
                        del results[script]
                    results[winner] = total_random
            if sum(results.itervalues()) < int(scale):
                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
            return results
        
        def get_current_payouts():
            return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
        
        def get_patron_sendmany(this):
            try:
                if '/' in this:
                    this, trunc = this.split('/', 1)
                else:
                    trunc = '0.01'
                return json.dumps(dict(
                    (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
                    for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
                    if bitcoin_data.script2_to_address(script, net.PARENT) is not None
                ))
            except:
                return json.dumps(None)
        
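        # Pool-wide and per-node statistics, averaged over roughly the last
        # hour of shares (3600/SHARE_PERIOD shares).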
        def get_global_stats():
            # averaged over last hour
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            return json.dumps(dict(
                pool_nonstale_hash_rate=nonstale_hash_rate,
                pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
                pool_stale_prop=stale_prop,
            ))
        
        def get_local_stats():
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            
            my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
            my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
            my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
            my_share_count = my_unstale_count + my_orphan_count + my_doa_count
            my_stale_count = my_orphan_count + my_doa_count
            
            my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
            
            my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
                for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
                if share.hash in my_share_hashes)
            actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
                tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
            share_att_s = my_work / actual_time
            
            miner_hash_rates = {}
            miner_dead_hash_rates = {}
            datums, dt = local_rate_monitor.get_datums_in_last()
            for datum in datums:
                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
                if datum['dead']:
                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
            
            return json.dumps(dict(
                my_hash_rates_in_last_hour=dict(
                    note="DEPRECATED",
                    nonstale=share_att_s,
                    rewarded=share_att_s/(1 - global_stale_prop),
                    actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
                ),
                my_share_counts_in_last_hour=dict(
                    shares=my_share_count,
                    unstale_shares=my_unstale_count,
                    stale_shares=my_stale_count,
                    orphan_stale_shares=my_orphan_count,
                    doa_stale_shares=my_doa_count,
                ),
                my_stale_proportions_in_last_hour=dict(
                    stale=my_stale_prop,
                    orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                    dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
                ),
                miner_hash_rates=miner_hash_rates,
                miner_dead_hash_rates=miner_dead_hash_rates,
            ))
        
        def get_peer_addresses():
            return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
        
        def get_uptime():
            return json.dumps(time.time() - start_time)
        
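        # WebInterface: a minimal read-only Twisted resource that serves the
        # result of a callable with the given MIME type, passing the named
        # query-string fields through as positional arguments. A new endpoint
        # would follow the same pattern, e.g. (hypothetical):
        #     web_root.putChild('version', WebInterface(lambda: json.dumps(p2pool.__version__), 'application/json'))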
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type, *fields):
                resource.Resource.__init__(self)
                self.func, self.mime_type, self.fields = func, mime_type, fields
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                request.setHeader('Access-Control-Allow-Origin', '*')
                return self.func(*(request.args[field][0] for field in self.fields))
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)), 'application/json'))
        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
        web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        new_root = resource.Resource()
        web_root.putChild('web', new_root)
        
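        # Keep a rolling 24-hour log of pool/local statistics in the 'stats'
        # file, updated every five minutes and served at /web/log.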
        stat_log = []
        if os.path.exists(os.path.join(datadir_path, 'stats')):
            try:
                with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                    stat_log = json.loads(f.read())
            except:
                log.err(None, 'Error loading stats:')
        def update_stat_log():
            while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
                stat_log.pop(0)
            
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
            
            miner_hash_rates = {}
            miner_dead_hash_rates = {}
            datums, dt = local_rate_monitor.get_datums_in_last()
            for datum in datums:
                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
                if datum['dead']:
                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
            
            stat_log.append(dict(
                time=time.time(),
                pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
                pool_stale_prop=global_stale_prop,
                local_hash_rates=miner_hash_rates,
                local_dead_hash_rates=miner_dead_hash_rates,
                shares=shares,
                stale_shares=stale_orphan_shares + stale_doa_shares,
                stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
                current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
                peers=dict(
                    incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                    outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming),
                ),
                attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].max_target),
                attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target),
                block_value=current_work2.value['subsidy']*1e-8,
            ))
            
            with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
                f.write(json.dumps(stat_log))
        task.LoopingCall(update_stat_log).start(5*60)
        new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
        
        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
        web_root.putChild('graphs', grapher.get_resource())
        def add_point():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return
            nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
            poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
            grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
        task.LoopingCall(add_point).start(100)
        
        def attempt_listen():
            try:
                reactor.listenTCP(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
            except error.CannotListenError, e:
                print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
                reactor.callLater(1, attempt_listen)
            else:
                with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
                    pass
        attempt_listen()
        
        print '    ...success!'
        print
        
        
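        # work_poller: refresh the block template from bitcoind whenever the
        # P2P factory signals a new block, and at least every 15 seconds.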
        @defer.inlineCallbacks
        def work_poller():
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
        
        
        # done!
        print 'Started successfully!'
        print
        
        
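        # Watchdog: re-arm a 30-second SIGALRM every second; if the reactor
        # stalls for 30 seconds, the handler dumps a stack trace to stderr.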
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
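        # Optionally announce found blocks on freenode IRC (#p2pool, or
        # #p2pool-alt for other networks), randomly delaying and cancelling the
        # announcement if another node says it first.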
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100),)
                channel = '#p2pool' if net.NAME == 'bitcoin' else '#p2pool-alt'
                def lineReceived(self, line):
                    print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)
                    self.watch_id = tracker.verified.added.watch(self._new_share)
                    self.announced_hashes = set()
                    self.delayed_messages = {}
                def privmsg(self, user, channel, message):
                    if channel == self.channel and message in self.delayed_messages:
                        self.delayed_messages.pop(message).cancel()
                def _new_share(self, share):
                    if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes and abs(share.timestamp - time.time()) < 10*60:
                        self.announced_hashes.add(share.header_hash)
                        message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                        self.delayed_messages[message] = reactor.callLater(random.expovariate(1/5), lambda: (self.say(self.channel, message), self.delayed_messages.pop(message)))
                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
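        # status_thread: every few seconds, print a status line with share
        # counts, peers, local and pool hash rates, stale rates, and expected
        # times to share/block, and warn if bitcoind has been silent for over
        # a minute.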
973         @defer.inlineCallbacks
974         def status_thread():
975             last_str = None
976             last_time = 0
977             while True:
978                 yield deferral.sleep(3)
979                 try:
980                     if time.time() > current_work2.value['last_update'] + 60:
981                         print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - current_work2.value['last_update']),)
982                     
983                     height = tracker.get_height(current_work.value['best_share_hash'])
984                     this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
985                         height,
986                         len(tracker.verified.shares),
987                         len(tracker.shares),
988                         len(p2p_node.peers),
989                         sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
990                     ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
991                     
992                     datums, dt = local_rate_monitor.get_datums_in_last()
993                     my_att_s = sum(datum['work']/dt for datum in datums)
994                     this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
995                         math.format(int(my_att_s)),
996                         math.format_dt(dt),
997                         math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
998                         math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].max_target / my_att_s) if my_att_s and current_work.value['best_share_hash'] else '???',
999                     )
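                    # Expected time to share: a hash attempt succeeds when it
                    # falls at or below the share target, i.e. with
                    # probability max_target/2**256, so the expectation is
                    # 2**256/max_target attempts, divided here by the local
                    # rate my_att_s (attempts per second).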
1000                     
1001                     if height > 2:
1002                         (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
1003                         stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
1004                         real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
1005                         
1006                         this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
1007                             shares, stale_orphan_shares, stale_doa_shares,
1008                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
1009                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
1010                             get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
1011                         )
1012                         this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
1013                             math.format(int(real_att_s)),
1014                             100*stale_prop,
1015                             math.format_dt(2**256 / current_work.value['bits'].target / real_att_s),
1016                         )
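                        # The pool rate estimated from the share chain is
                        # divided by (1 - stale_prop) because stale (orphan or
                        # dead) shares are real work that never made it into
                        # the chain; expected time to block reuses the
                        # 2**256/target expectation against the block target.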
1017                     
1018                     if this_str != last_str or time.time() > last_time + 15:
1019                         print this_str
1020                         last_str = this_str
1021                         last_time = time.time()
1022                 except:
1023                     log.err()
1024         status_thread()
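        # status_thread() re-evaluates the status summary every 3 seconds but
        # only prints when it changes, or at least every 15 seconds as a
        # heartbeat, keeping the log readable.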
1025     except:
1026         log.err(None, 'Fatal error:')
1027         reactor.stop()
1028
1029 def run():
1030     class FixedArgumentParser(argparse.ArgumentParser):
1031         def _read_args_from_files(self, arg_strings):
1032             # expand arguments referencing files
1033             new_arg_strings = []
1034             for arg_string in arg_strings:
1035                 
1036                 # for regular arguments, just add them back into the list
1037                 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
1038                     new_arg_strings.append(arg_string)
1039                 
1040                 # replace arguments referencing files with the file content
1041                 else:
1042                     try:
1043                         args_file = open(arg_string[1:])
1044                         try:
1045                             arg_strings = []
1046                             for arg_line in args_file.read().splitlines():
1047                                 for arg in self.convert_arg_line_to_args(arg_line):
1048                                     arg_strings.append(arg)
1049                             arg_strings = self._read_args_from_files(arg_strings)
1050                             new_arg_strings.extend(arg_strings)
1051                         finally:
1052                             args_file.close()
1053                     except IOError:
1054                         err = sys.exc_info()[1]
1055                         self.error(str(err))
1056             
1057             # return the modified argument list
1058             return new_arg_strings
1059         
1060         def convert_arg_line_to_args(self, arg_line):
1061             return arg_line.split() # split() on whitespace already discards empty tokens
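    # FixedArgumentParser appears to duplicate argparse's own
    # _read_args_from_files so that nested @file references are expanded
    # recursively, while the convert_arg_line_to_args override splits each
    # line of an @file on whitespace instead of treating the whole line as a
    # single argument. Illustrative example (hypothetical file name): a file
    # p2pool.conf containing
    #
    #     --net litecoin
    #     --give-author 1.0
    #
    # passed as 'run_p2pool.py @p2pool.conf' expands to
    # ['--net', 'litecoin', '--give-author', '1.0'].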
1062     
1063     
1064     realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
1065     
1066     parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
1067     parser.add_argument('--version', action='version', version=p2pool.__version__)
1068     parser.add_argument('--net',
1069         help='use specified network (default: bitcoin)',
1070         action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
1071     parser.add_argument('--testnet',
1072         help='''use the network's testnet''',
1073         action='store_const', const=True, default=False, dest='testnet')
1074     parser.add_argument('--debug',
1075         help='enable debugging mode',
1076         action='store_const', const=True, default=False, dest='debug')
1077     parser.add_argument('-a', '--address',
1078         help='generate payouts to this address (default: <address requested from bitcoind>)',
1079         type=str, action='store', default=None, dest='address')
1080     parser.add_argument('--datadir',
1081         help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
1082         type=str, action='store', default=None, dest='datadir')
1083     parser.add_argument('--logfile',
1084         help='''log to this file (default: data/<NET>/log)''',
1085         type=str, action='store', default=None, dest='logfile')
1086     parser.add_argument('--merged',
1087         help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
1088         type=str, action='append', default=[], dest='merged_urls')
1089     parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
1090         help='donate this percentage of work to author of p2pool (default: 0.5)',
1091         type=float, action='store', default=0.5, dest='donation_percentage')
1092     parser.add_argument('--irc-announce',
1093         help='announce any blocks found on irc://irc.freenode.net/#p2pool',
1094         action='store_true', default=False, dest='irc_announce')
1095     
1096     p2pool_group = parser.add_argument_group('p2pool interface')
1097     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
1098         help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
1099         type=int, action='store', default=None, dest='p2pool_port')
1100     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
1101         help='''connect to an existing p2pool node at ADDR listening on port PORT (PORT defaults to the network's p2pool P2P port) in addition to the builtin addresses''',
1102         type=str, action='append', default=[], dest='p2pool_nodes')
1103     parser.add_argument('--disable-upnp',
1104         help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
1105         action='store_false', default=True, dest='upnp')
1106     
1107     worker_group = parser.add_argument_group('worker interface')
1108     worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
1109         help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
1110         type=str, action='store', default=None, dest='worker_endpoint')
1111     worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
1112         help='''charge workers who mine to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee for mining on your p2pool instance. The amount is displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
1113         type=float, action='store', default=0, dest='worker_fee')
1114     
1115     bitcoind_group = parser.add_argument_group('bitcoind interface')
1116     bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
1117         help='connect to this address (default: 127.0.0.1)',
1118         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
1119     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
1120         help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
1121         type=int, action='store', default=None, dest='bitcoind_rpc_port')
1122     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
1123         help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
1124         type=int, action='store', default=None, dest='bitcoind_p2p_port')
1125     
1126     bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
1127         help='bitcoind RPC interface username, then password, space-separated (providing only one will cause the username to default to empty; providing neither will cause P2Pool to read both from bitcoin.conf)',
1128         type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
1129     
1130     args = parser.parse_args()
1131     
1132     if args.debug:
1133         p2pool.DEBUG = True
1134     
1135     net_name = args.net_name + ('_testnet' if args.testnet else '')
1136     net = networks.nets[net_name]
1137     
1138     datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
1139     if not os.path.exists(datadir_path):
1140         os.makedirs(datadir_path)
1141     
1142     if len(args.bitcoind_rpc_userpass) > 2:
1143         parser.error('a maximum of two arguments is allowed')
1144     args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
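    # Left-padding with [None, None] normalizes the 0-2 positional arguments,
    # e.g. (illustrative):
    #     []               -> (None, None)    both looked up in bitcoin.conf
    #     ['pw']           -> (None, 'pw')    username defaults to '' below
    #     ['user', 'pw']   -> ('user', 'pw')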
1145     
1146     if args.bitcoind_rpc_password is None:
1147         if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
1148             parser.error('This network has no configuration file function. Manually enter your RPC password.')
1149         conf_path = net.PARENT.CONF_FILE_FUNC()
1150         if not os.path.exists(conf_path):
1151             parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
1152                 '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
1153                 '''\r\n'''
1154                 '''server=1\r\n'''
1155                 '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
1156         with open(conf_path, 'rb') as f:
1157             cp = ConfigParser.RawConfigParser()
1158             cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
1159             for conf_name, var_name, var_type in [
1160                 ('rpcuser', 'bitcoind_rpc_username', str),
1161                 ('rpcpassword', 'bitcoind_rpc_password', str),
1162                 ('rpcport', 'bitcoind_rpc_port', int),
1163                 ('port', 'bitcoind_p2p_port', int),
1164             ]:
1165                 if getattr(args, var_name) is None and cp.has_option('x', conf_name):
1166                     setattr(args, var_name, var_type(cp.get('x', conf_name)))
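    # RawConfigParser requires a section header, so a dummy '[x]' is
    # prepended to bitcoin.conf's flat key=value pairs before parsing. A
    # minimal conf that would satisfy this (illustrative values):
    #
    #     server=1
    #     rpcuser=bitcoinrpc
    #     rpcpassword=<long random string>
    #
    # Explicit command-line flags win: a value is copied onto args only when
    # the corresponding argument is still None.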
1167     
1168     if args.bitcoind_rpc_username is None:
1169         args.bitcoind_rpc_username = ''
1170     
1171     if args.bitcoind_rpc_port is None:
1172         args.bitcoind_rpc_port = net.PARENT.RPC_PORT
1173     
1174     if args.bitcoind_p2p_port is None:
1175         args.bitcoind_p2p_port = net.PARENT.P2P_PORT
1176     
1177     if args.p2pool_port is None:
1178         args.p2pool_port = net.P2P_PORT
1179     
1180     if args.worker_endpoint is None:
1181         worker_endpoint = '', net.WORKER_PORT
1182     elif ':' not in args.worker_endpoint:
1183         worker_endpoint = '', int(args.worker_endpoint)
1184     else:
1185         addr, port = args.worker_endpoint.rsplit(':', 1)
1186         worker_endpoint = addr, int(port)
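    # Illustrative parses of --worker-port (rsplit(':', 1) splits on the last
    # colon only):
    #     (unset)           -> ('', net.WORKER_PORT)   all interfaces
    #     '9332'            -> ('', 9332)
    #     '127.0.0.1:9332'  -> ('127.0.0.1', 9332)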
1187     
1188     if args.address is not None:
1189         try:
1190             args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
1191         except Exception, e:
1192             parser.error('error parsing address: ' + repr(e))
1193     else:
1194         args.pubkey_hash = None
1195     
1196     def separate_url(url):
1197         s = urlparse.urlsplit(url)
1198         if '@' not in s.netloc:
1199             parser.error('merged url netloc must contain an "@"')
1200         userpass, new_netloc = s.netloc.rsplit('@', 1)
1201         return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
1202     merged_urls = map(separate_url, args.merged_urls)
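    # separate_url() strips the credentials out of each --merged url so they
    # can be passed separately to the JSON-RPC proxy, e.g.:
    #     separate_url('http://ncuser:ncpass@127.0.0.1:10332/')
    #     -> ('http://127.0.0.1:10332/', 'ncuser:ncpass')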
1203     
1204     if args.logfile is None:
1205         args.logfile = os.path.join(datadir_path, 'log')
1206     
1207     logfile = logging.LogFile(args.logfile)
1208     pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
1209     sys.stdout = logging.AbortPipe(pipe)
1210     sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
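    # Logging pipeline, going by the wrapper names: TimestampingPipe prefixes
    # each line with a timestamp, TeePipe fans writes out to the real stderr
    # (via EncodeReplacerPipe, which presumably replaces unencodable
    # characters) and to the logfile, and PrefixPipe marks stderr-originated
    # lines with '> '. AbortPipe presumably shuts the process down if a write
    # fails rather than losing output silently.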
1211     if hasattr(signal, "SIGUSR1"):
1212         def sigusr1(signum, frame):
1213             print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
1214             logfile.reopen()
1215             print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
1216         signal.signal(signal.SIGUSR1, sigusr1)
1217     task.LoopingCall(logfile.reopen).start(5)
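    # Log rotation: SIGUSR1 (where the platform provides it) and the 5-second
    # LoopingCall both reopen the logfile, so an external logrotate can move
    # the file aside and have p2pool start writing a fresh one within seconds.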
1218     
1219     reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
1220     reactor.run()