# p2pool/main.py
from __future__ import division

import ConfigParser
import StringIO
import argparse
import os
import random
import struct
import sys
import time
import json
import signal
import traceback
import urlparse

from twisted.internet import defer, error, reactor, protocol, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
from . import p2p, networks, graphs
import p2pool, p2pool.data as p2pool_data

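# getwork: poll bitcoind's getmemorypool RPC for a block template. It retries
# on failure, treats RPC error -32601 (method not found) as a too-old bitcoind,
# and normalizes the result: transactions are hex-decoded and unpacked, and the
# merkle branch for the (not yet built) coinbase transaction is precomputed
# using a zero placeholder at index 0.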
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    try:
        work = yield bitcoind.rpc_getmemorypool()
    except jsonrpc.Error, e:
        if e.code == -32601: # Method not found
            print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
            raise deferral.RetrySilentlyException()
        raise
    packed_transactions = [x.decode('hex') for x in work['transactions']]
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
        merkle_branch=bitcoin_data.calculate_merkle_branch([0] + map(bitcoin_data.hash256, packed_transactions), 0),
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
    ))

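# main: the node's startup sequence. Roughly: verify the bitcoind RPC and P2P
# connections, determine the payout script, load saved shares from disk, join
# the p2pool network, start the worker (getwork) interface and web stats, then
# loop forever polling for new work and printing status.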
@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
        if not good:
            print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        print 'Determining payout script...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            
            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            
            with open(address_path, 'wb') as f:
                f.write(address)
            
            my_script = bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net.PARENT))
        else:
            print '    ...Computing payout script from provided address...'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success! Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
        print
        
        my_share_hashes = set()
        my_doa_share_hashes = set()
        
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        recent_blocks = []
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
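        # Note: the ShareStore is the on-disk mirror of the tracker. The watch
        # hooks above keep it (and shared_share_hashes) in sync by forgetting
        # shares as the tracker prunes them off the chain.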
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_merged_work = variable.Variable({})
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        requested = expiring_dict.ExpiringDict(300)
        
        print 'Initializing work...'
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                merkle_branch=work['merkle_branch'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # current_work2 must be set first: handlers triggered by (pre_)current_work changing read it
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                bits=work['bits'],
                coinbaseflags=work['coinbaseflags'],
            ))
        yield set_real_work1()
        
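        # Two strategies for measuring a block's height relative to the best
        # known block: if bitcoind's help text lists a getblock RPC, heights
        # are fetched over RPC and memoized with a DeferredCacher; otherwise a
        # HeightTracker reconstructs the same information from bitcoind's p2p
        # interface.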
        if '\ngetblock ' in (yield bitcoind.rpc_help()):
            height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
            best_height_cached = variable.Variable((yield height_cacher(pre_current_work.value['previous_block'])))
            def get_height_rel_highest(block_hash):
                this_height = height_cacher.call_now(block_hash, 0)
                best_height = height_cacher.call_now(pre_current_work.value['previous_block'], 0)
                best_height_cached.set(max(best_height_cached.value, this_height, best_height))
                return this_height - best_height_cached.value
        else:
            get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory).get_height_rel_highest
        
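        # set_real_work2 merges the tracker's opinion of the best share with
        # the latest bitcoind work, then asks peers for any missing parent
        # shares. Requests for a given hash back off exponentially: a hash is
        # not re-requested until 10 * 1.5**count seconds after the previous
        # attempt, and the request usually (80% of the time) goes to a random
        # peer known to have the relevant head, to spread the load.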
        def set_real_work2():
            best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['mm_chains'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        pre_current_work.changed.watch(lambda _: set_real_work2())
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        set_real_work2()
        print '    ...success!'
        print
        
        
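        # Each merged-mining chain is polled in its own loop: getauxblock is
        # called about once a second and the resulting aux work is stored in
        # pre_merged_work keyed by chain id, which in turn triggers
        # set_real_work2 via the watcher above.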
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.Proxy(merged_url, (merged_userpass,))
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
                pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # setup p2p logic and join p2pool network
        
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
                if not hashes:
                    return # guard against an empty request, which would otherwise divide by zero below
                parents = min(parents, 1000//len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                peer.sendShares(shares)
        
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker))
                else:
                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash)
                print
                recent_blocks.append({ 'ts': share.timestamp, 'hash': '%x' % (share.header_hash,) })
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
            try:
                addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
            except:
                print >>sys.stderr, "error reading addrs"
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        p2p_node = Node(
            best_share_hash_func=lambda: current_work.value['best_share_hash'],
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
        )
        p2p_node.start()
        
        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs.txt'), 'w') as f:
                f.writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
        task.LoopingCall(save_addrs).start(60)
        
        # broadcast new shares on our best chain to peers whenever the best share changes
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        start_time = time.time()
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120)) # sleep 120 seconds on average between refreshes
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        
        if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
            with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
                vip_pass = f.read().strip('\r\n')
        else:
            vip_pass = '%016x' % (random.randrange(2**64),)
            with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
                f.write(vip_pass)
        print '    Worker password:', vip_pass, '(only required for generating graphs)'
        
        # setup worker logic
        
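        # Stale-share accounting. stale_info in share_data is one of three
        # magic values: 0 (nothing to report), 253 (records one of our
        # orphaned shares), or 254 (records one of our dead-on-arrival
        # shares). The removed_*_var counters below compensate for our shares
        # that the tracker has already pruned off the end of the chain, so
        # get_stale_counts stays accurate.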
        removed_unstales_var = variable.Variable((0, 0, 0))
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
        
        removed_doa_unstales_var = variable.Variable(0)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
        
        def get_stale_counts():
            '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
            my_shares = len(my_share_hashes)
            my_doa_shares = len(my_doa_share_hashes)
            delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
            my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
            my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
            orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
            doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
            
            my_shares_not_in_chain = my_shares - my_shares_in_chain
            my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
            
            # shares missing from the chain that weren't dead on arrival are presumed orphaned
            return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
        
        
        local_rate_monitor = math.RateMonitor(10*60)
        
        class WorkerBridge(worker_interface.WorkerBridge):
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                self.recent_shares_ts_work = []
            
            def _get_payout_script_from_username(self, user):
                if user is None:
                    return None
                try:
                    pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                except: # XXX blah
                    return None
                return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
            
            def preprocess_request(self, request):
                payout_script = self._get_payout_script_from_username(request.getUser())
                # the fee is taken probabilistically: worker_fee percent of shares pay the node operator
                if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                    payout_script = my_script
                return payout_script,
            
            def get_work(self, payout_script):
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
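                # Merged mining: build a tree over all aux chains, embed its
                # merkle root in the coinbase after the '\xfa\xbemm' magic
                # bytes, and remember each chain's index into the hash list so
                # a merkle proof can be produced if a solution is found.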
                if current_work.value['mm_chains']:
                    tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
                    mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
                    mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                        merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                        size=size,
                        nonce=0,
                    ))
                    mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
                else:
                    mm_data = ''
                    mm_later = []
                
                share_info, generate_tx = p2pool_data.generate_transaction(
                    tracker=tracker,
                    share_data=dict(
                        previous_share_hash=current_work.value['best_share_hash'],
                        coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
                        nonce=struct.pack('<Q', random.randrange(2**64)),
                        new_script=payout_script,
                        subsidy=current_work2.value['subsidy'],
                        donation=math.perfect_round(65535*args.donation_percentage/100),
                        stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                            253 if orphans > orphans_recorded_in_chain else
                            254 if doas > doas_recorded_in_chain else
                            0
                        )(*get_stale_counts()),
                    ),
                    block_target=current_work.value['bits'].target,
                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                    net=net,
                )
                
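                # Choose a pseudoshare target for the miner: start at
                # difficulty 1, then, once 50 recent pseudoshares have been
                # seen, tighten it so the miner returns roughly one result
                # every 5 seconds. It is never made harder than the real share
                # target or any aux chain's target (a larger target is
                # easier), so no valid share or merged-mining solution is
                # missed.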
                target = 2**256//2**32 - 1
                if len(self.recent_shares_ts_work) == 50:
                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                    target = min(target, 2**256//(hash_rate * 5))
                target = max(target, share_info['bits'].target)
                for aux_work in current_work.value['mm_chains'].itervalues():
                    target = max(target, aux_work['target'])
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
                merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(generate_tx)), 0, current_work2.value['merkle_branch'])
                
                getwork_time = time.time()
                merkle_branch = current_work2.value['merkle_branch']
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                ba = bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
                
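                # got_response handles each solved header from the miner. In
                # order: submit it to bitcoind if it meets the block target,
                # submit aux proofs to any merged chains it satisfies, add it
                # to the share chain and broadcast it if it meets the share
                # target, and finally record pseudoshare statistics.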
                def got_response(header, request):
                    assert header['merkle_root'] == merkle_root
                    
                    pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
                    on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                    
                    try:
                        if pow_hash <= header['bits'].target or p2pool.DEBUG:
                            @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
                            def submit_block():
                                if factory.conn.value is None:
                                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Hash: %x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),)
                                    raise deferral.RetrySilentlyException()
                                factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                            submit_block()
                            if pow_hash <= header['bits'].target:
                                print
                                print 'GOT BLOCK FROM MINER! Passing to bitcoind! bitcoin: %x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),)
                                print
                                recent_blocks.append({ 'ts': time.time(), 'hash': '%x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),) })
                    except:
                        log.err(None, 'Error while processing potential block:')
                    
                    for aux_work, index, hashes in mm_later:
                        try:
                            if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                                df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                    pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                                    bitcoin_data.aux_pow_type.pack(dict(
                                        merkle_tx=dict(
                                            tx=transactions[0],
                                            block_hash=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),
                                            merkle_branch=merkle_branch,
                                            index=0,
                                        ),
                                        merkle_branch=bitcoin_data.calculate_merkle_branch(hashes, index),
                                        index=index,
                                        parent_block_header=header,
                                    )).encode('hex'),
                                )
                                @df.addCallback
                                def _(result):
                                    if result != (pow_hash <= aux_work['target']):
                                        print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                                    else:
                                        print 'Merged block submittal result: %s' % (result,)
                                @df.addErrback
                                def _(err):
                                    log.err(err, 'Error submitting merged block:')
                        except:
                            log.err(None, 'Error while processing merged mining POW:')
                    
                    if pow_hash <= share_info['bits'].target:
                        share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                        print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                            request.getUser(),
                            p2pool_data.format_hash(share.hash),
                            p2pool_data.format_hash(share.previous_hash),
                            time.time() - getwork_time,
                            ' DEAD ON ARRIVAL' if not on_time else '',
                        )
                        my_share_hashes.add(share.hash)
                        if not on_time:
                            my_doa_share_hashes.add(share.hash)
                        
                        tracker.add(share)
                        if not p2pool.DEBUG:
                            tracker.verified.add(share)
                        set_real_work2()
                        
                        try:
                            if pow_hash <= header['bits'].target or p2pool.DEBUG:
                                for peer in p2p_node.peers.itervalues():
                                    peer.sendShares([share])
                                shared_share_hashes.add(share.hash)
                        except:
                            log.err(None, 'Error forwarding block solution:')
                    
                    if pow_hash <= target:
                        reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
                        if request.getPassword() == vip_pass:
                            reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
                        self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                        while len(self.recent_shares_ts_work) > 50:
                            self.recent_shares_ts_work.pop(0)
                        local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
                    
                    if pow_hash > target:
                        print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
                        print '    Hash:   %56x' % (pow_hash,)
                        print '    Target: %56x' % (target,)
                    
                    return on_time
                
                return ba, got_response
        
        web_root = resource.Resource()
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
        
        def get_rate():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return json.dumps(None)
            return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
            return json.dumps(res)
        
        def get_current_txouts():
            share = tracker.shares[current_work.value['best_share_hash']]
            share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
            return dict((out['script'], out['value']) for out in gentx['tx_outs'])
        
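        # get_current_scaled_txouts rescales the current payouts to `scale`
        # total units. Outputs that would fall below `trunc` are removed and
        # their combined value goes to one weighted-random winner among them,
        # so a patron's sendmany isn't filled with dust; rounding leftovers
        # are assigned the same way.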
        def get_current_scaled_txouts(scale, trunc=0):
            txouts = get_current_txouts()
            total = sum(txouts.itervalues())
            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
            if trunc > 0:
                total_random = 0
                random_set = set()
                for s in sorted(results, key=results.__getitem__):
                    if results[s] >= trunc:
                        break
                    total_random += results[s]
                    random_set.add(s)
                if total_random:
                    winner = math.weighted_choice((script, results[script]) for script in random_set)
                    for script in random_set:
                        del results[script]
                    results[winner] = total_random
            if sum(results.itervalues()) < int(scale):
                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
            return results
        
        def get_current_payouts():
            return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
        
        def get_patron_sendmany(this):
            try:
                if '/' in this:
                    this, trunc = this.split('/', 1)
                else:
                    trunc = '0.01'
                return json.dumps(dict(
                    (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
                    for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
                    if bitcoin_data.script2_to_address(script, net.PARENT) is not None
                ))
            except:
                return json.dumps(None)
        
        def get_global_stats():
            # averaged over last hour
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            return json.dumps(dict(
                pool_nonstale_hash_rate=nonstale_hash_rate,
                pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
                pool_stale_prop=stale_prop,
            ))
        
        def get_local_stats():
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            
            my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
            my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
            my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
            my_share_count = my_unstale_count + my_orphan_count + my_doa_count
            my_stale_count = my_orphan_count + my_doa_count
            
            my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
            
            my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
                for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
                if share.hash in my_share_hashes)
            actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
                tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
            share_att_s = my_work / actual_time
            
            miner_hash_rates = {}
            miner_dead_hash_rates = {}
            datums, dt = local_rate_monitor.get_datums_in_last()
            for datum in datums:
                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
                if datum['dead']:
                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
            
            return json.dumps(dict(
                my_hash_rates_in_last_hour=dict(
                    note="DEPRECATED",
                    nonstale=share_att_s,
                    rewarded=share_att_s/(1 - global_stale_prop),
                    actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
                ),
                my_share_counts_in_last_hour=dict(
                    shares=my_share_count,
                    unstale_shares=my_unstale_count,
                    stale_shares=my_stale_count,
                    orphan_stale_shares=my_orphan_count,
                    doa_stale_shares=my_doa_count,
                ),
                my_stale_proportions_in_last_hour=dict(
                    stale=my_stale_prop,
                    orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                    dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
                ),
                miner_hash_rates=miner_hash_rates,
                miner_dead_hash_rates=miner_dead_hash_rates,
            ))
        
        def get_peer_addresses():
            return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
        
        def get_uptime():
            return json.dumps(time.time() - start_time)
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type, *fields):
                resource.Resource.__init__(self)
                self.func, self.mime_type, self.fields = func, mime_type, fields
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                request.setHeader('Access-Control-Allow-Origin', '*')
                return self.func(*(request.args[field][0] for field in self.fields))
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
        web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        new_root = resource.Resource()
        web_root.putChild('web', new_root)
        
        stat_log = []
        if os.path.exists(os.path.join(datadir_path, 'stats')):
            try:
                with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                    stat_log = json.loads(f.read())
            except:
                log.err(None, 'Error loading stats:')
        def update_stat_log():
            while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
                stat_log.pop(0)
            
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
            
            miner_hash_rates = {}
            miner_dead_hash_rates = {}
            datums, dt = local_rate_monitor.get_datums_in_last()
            for datum in datums:
                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
                if datum['dead']:
                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
            
            stat_log.append(dict(
                time=time.time(),
                pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
                pool_stale_prop=global_stale_prop,
                local_hash_rates=miner_hash_rates,
                local_dead_hash_rates=miner_dead_hash_rates,
                shares=shares,
                stale_shares=stale_orphan_shares + stale_doa_shares,
                stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
            ))
            
            with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
                f.write(json.dumps(stat_log))
        task.LoopingCall(update_stat_log).start(5*60)
        new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
        
        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
        web_root.putChild('graphs', grapher.get_resource())
        def add_point():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return
            nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
            poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
            grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
        task.LoopingCall(add_point).start(100)
        
        def attempt_listen():
            try:
                reactor.listenTCP(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
            except error.CannotListenError, e:
                print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
                reactor.callLater(1, attempt_listen)
            else:
                with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
                    pass
        attempt_listen()
        
        print '    ...success!'
        print
        
        
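        # work_poller refreshes the bitcoind work whenever the p2p factory
        # signals a new block, or after at most 15 seconds, whichever comes
        # first.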
        @defer.inlineCallbacks
        def work_poller():
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
        
        
        # done!
        print 'Started successfully!'
        print
        
        
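        # Watchdog: signal.alarm(30) is re-armed every second, so SIGALRM only
        # fires if the reactor stalls for 30 seconds; the handler captures the
        # current stack and schedules it to be written to stderr rather than
        # killing the process.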
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool'
                def lineReceived(self, line):
                    print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join('#p2pool')
                    self.watch_id = tracker.verified.added.watch(self._new_share)
                    self.announced_hashes = set()
                def _new_share(self, share):
                    if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes and abs(share.timestamp - time.time()) < 10*60:
                        self.announced_hashes.add(share.header_hash)
                        self.say('#p2pool', '\x02BLOCK FOUND by %s! http://blockexplorer.com/block/%064x' % (bitcoin_data.script2_to_address(share.share_data['new_script'], net.PARENT), share.header_hash))
                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    if time.time() > current_work2.value['last_update'] + 60:
                        print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - current_work2.value['last_update']),)
                    
                    height = tracker.get_height(current_work.value['best_share_hash'])
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(tracker.verified.shares),
                        len(tracker.shares),
                        len(p2p_node.peers),
                        sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    
                    datums, dt = local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work']/dt for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                        math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].target / my_att_s) if my_att_s else '???',
                    )
                    
                    if height > 2:
                        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
                        
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            get_current_txouts().get(my_script, 0)*1e-8, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            math.format_dt(2**256 / current_work.value['bits'].target / real_att_s),
                        )
                    
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()

977 def run():
978     class FixedArgumentParser(argparse.ArgumentParser):
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            return [arg for arg in arg_line.split() if arg.strip()]
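        # Example (hypothetical file contents): invoking `run_p2pool.py @p2pool.conf`
        # where p2pool.conf contains the line `--give-author 1` expands the
        # command line to [..., '--give-author', '1'].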


    realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
    
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from bitcoind>)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--datadir',
        help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
        type=str, action='store', default=None, dest='datadir')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged',
        help='call getauxblock on this URL to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--merged-url',
        help='DEPRECATED, use --merged',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='DEPRECATED, use --merged',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work to author of p2pool (default: 0.5)',
        type=float, action='store', default=0.5, dest='donation_percentage')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='''connect to an existing p2pool node at ADDR listening on port PORT (PORT defaults to the network's p2pool P2P port) in addition to the built-in addresses''',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
        help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str, action='store', default=None, dest='worker_endpoint')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers who mine to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee for mining on your p2pool instance. Displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, separated by a space (providing only one causes the username to default to empty; providing neither causes P2Pool to read both from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    
    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments is allowed')
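    # left-pad the 0-2 positional values with Nones and keep the last two:
    # () -> (None, None), (pass,) -> (None, pass), (user, pass) -> (user, pass)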
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
    
    if args.bitcoind_rpc_password is None:
        if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
            parser.error('This network has no configuration file function. Manually enter your RPC password.')
        conf_path = net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you haven't yet created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
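        # bitcoin.conf is a plain key=value file with no section header, so
        # prepend a dummy [x] section to make it digestible by RawConfigParser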
        with open(conf_path, 'rb') as f:
            cp = ConfigParser.RawConfigParser()
            cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
            for conf_name, var_name, var_type in [
                ('rpcuser', 'bitcoind_rpc_username', str),
                ('rpcpassword', 'bitcoind_rpc_password', str),
                ('rpcport', 'bitcoind_rpc_port', int),
                ('port', 'bitcoind_p2p_port', int),
            ]:
                if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                    setattr(args, var_name, var_type(cp.get('x', conf_name)))
    
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
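    # the worker endpoint may be given as just a port or as ADDR:PORT;
    # rsplit on the last colon so that only the final component is taken
    # as the port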
    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    def separate_url(url):
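        # split the credentials out of a merged-mining URL, e.g.
        # 'http://ncuser:ncpass@127.0.0.1:10332/' ->
        # ('http://127.0.0.1:10332/', 'ncuser:ncpass')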
        s = urlparse.urlsplit(url)
        if '@' not in s.netloc:
            parser.error('merged url netloc must contain an "@"')
        userpass, new_netloc = s.netloc.rsplit('@', 1)
        return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
    merged_urls = map(separate_url, args.merged_urls)
    
    if args.merged_url is not None or args.merged_userpass is not None:
        print '--merged-url and --merged-userpass are deprecated! Use --merged http://USER:PASS@HOST:PORT/ instead!'
        print 'Pausing 10 seconds...'
        time.sleep(10)
        
        if args.merged_url is None or args.merged_userpass is None:
            parser.error('must specify both --merged-url and --merged-userpass')
        
        merged_urls = merged_urls + [(args.merged_url, args.merged_userpass)]
    
    
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
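    # tee stdout/stderr through a timestamping pipe so that everything is
    # both echoed to the console and appended to the logfile; stderr output
    # is additionally prefixed with '> '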
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
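    # periodically reopening the logfile lets external log rotation (which
    # renames the file out from under us) take effect without a restart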
    
    reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
    reactor.run()