only compute header_hash once in got_response
p2pool/main.py
from __future__ import division

import ConfigParser
import StringIO
import argparse
import os
import random
import struct
import sys
import time
import json
import signal
import traceback
import urlparse

from twisted.internet import defer, error, reactor, protocol, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
from . import p2p, networks, graphs
import p2pool, p2pool.data as p2pool_data

@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    try:
        work = yield bitcoind.rpc_getmemorypool()
    except jsonrpc.Error, e:
        if e.code == -32601: # Method not found
            print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
            raise deferral.RetrySilentlyException()
        raise
    packed_transactions = [x.decode('hex') for x in work['transactions']]
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
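        # the leading 0 below stands in for the hash of the generation (coinbase)
        # transaction, which doesn't exist yet; the branch for index 0 doesn't
        # depend on that element, so the merkle root can be recomputed later
        # once the generation transaction is actually built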
        merkle_branch=bitcoin_data.calculate_merkle_branch([0] + map(bitcoin_data.hash256, packed_transactions), 0),
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
    ))

@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
        if not good:
            print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        print 'Determining payout script...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            
            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            
            with open(address_path, 'wb') as f:
                f.write(address)
            
            my_script = bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net.PARENT))
        else:
            print '    ...Computing payout script from provided address...'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success! Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
        print
        
        my_share_hashes = set()
        my_doa_share_hashes = set()
        
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        recent_blocks = []
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_merged_work = variable.Variable({})
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        requested = expiring_dict.ExpiringDict(300)
        
        print 'Initializing work...'
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                merkle_branch=work['merkle_branch'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # current_work2 ('second') is set first, since everything hooks on pre_current_work ('first')
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                bits=work['bits'],
                coinbaseflags=work['coinbaseflags'],
            ))
        yield set_real_work1()
        
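        # if this bitcoind's RPC interface offers getblock, poll it for relative
        # block heights; otherwise fall back to tracking heights over the P2P
        # connection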
        if '\ngetblock ' in (yield deferral.retry()(bitcoind.rpc_help)()):
            height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
            best_height_cached = variable.Variable((yield deferral.retry()(height_cacher)(pre_current_work.value['previous_block'])))
            def get_height_rel_highest(block_hash):
                this_height = height_cacher.call_now(block_hash, 0)
                best_height = height_cacher.call_now(pre_current_work.value['previous_block'], 0)
                best_height_cached.set(max(best_height_cached.value, this_height, best_height))
                return this_height - best_height_cached.value
        else:
            get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory).get_height_rel_highest
        
        def set_real_work2():
            best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['mm_chains'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
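                # exponential backoff: skip this share if we already requested it
                # within roughly the last 10 * 1.5**count seconds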
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        pre_current_work.changed.watch(lambda _: set_real_work2())
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        set_real_work2()
        print '    ...success!'
        print
        
        
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.Proxy(merged_url, (merged_userpass,))
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
                pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # setup p2p logic and join p2pool network
        
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
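                # cap the reply at roughly 1000 shares total, split across the
                # requested chains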
                parents = min(parents, 1000//len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                peer.sendShares(shares)
        
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker))
                else:
                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash)
                print
                recent_blocks.append(dict(ts=share.timestamp, hash='%x' % (share.header_hash,)))
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
            try:
                addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
            except:
                print >>sys.stderr, "error reading addrs"
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        p2p_node = Node(
            best_share_hash_func=lambda: current_work.value['best_share_hash'],
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
        )
        p2p_node.start()
        
        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs.txt'), 'w') as f:
                f.writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
        task.LoopingCall(save_addrs).start(60)
        
        # broadcast our newest shares to peers when the best chain changes
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        start_time = time.time()
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        
        if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
            with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
                vip_pass = f.read().strip('\r\n')
        else:
            vip_pass = '%016x' % (random.randrange(2**64),)
            with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
                f.write(vip_pass)
        print '    Worker password:', vip_pass, '(only required for generating graphs)'
        
        # setup worker logic
        
        removed_unstales_var = variable.Variable((0, 0, 0))
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
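                # stale_info codes: 0 = on-time, 253 = orphaned, 254 = dead on arrival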
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
        
        removed_doa_unstales_var = variable.Variable(0)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
        
        def get_stale_counts():
            '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
            my_shares = len(my_share_hashes)
            my_doa_shares = len(my_doa_share_hashes)
            delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
            my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
            my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
            orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
            doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
            
            my_shares_not_in_chain = my_shares - my_shares_in_chain
            my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
            
            return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
        
        
        local_rate_monitor = math.RateMonitor(10*60)
        
        class WorkerBridge(worker_interface.WorkerBridge):
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                self.recent_shares_ts_work = []
            
            def _get_payout_script_from_username(self, user):
                if user is None:
                    return None
                try:
                    pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                except: # XXX blah
                    return None
                return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
            
            def preprocess_request(self, request):
                payout_script = self._get_payout_script_from_username(request.getUser())
                if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                    payout_script = my_script
                return payout_script,
            
            def get_work(self, payout_script):
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
                if current_work.value['mm_chains']:
                    tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
                    mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
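                    # '\xfa\xbemm' is the conventional merged-mining magic prefix;
                    # it marks the aux chain merkle root committed to in the coinbase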
                    mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                        merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                        size=size,
                        nonce=0,
                    ))
                    mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
                else:
                    mm_data = ''
                    mm_later = []
                
                share_info, generate_tx = p2pool_data.generate_transaction(
                    tracker=tracker,
                    share_data=dict(
                        previous_share_hash=current_work.value['best_share_hash'],
                        coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
                        nonce=struct.pack('<Q', random.randrange(2**64)),
                        new_script=payout_script,
                        subsidy=current_work2.value['subsidy'],
                        donation=math.perfect_round(65535*args.donation_percentage/100),
                        stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                            253 if orphans > orphans_recorded_in_chain else
                            254 if doas > doas_recorded_in_chain else
                            0
                        )(*get_stale_counts()),
                    ),
                    block_target=current_work.value['bits'].target,
                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                    net=net,
                )
                
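                # pseudoshare target: difficulty 1 by default, tightened so a fast
                # miner gets roughly one result every five seconds, but never made
                # harder than the share target or any aux chain target, so nothing
                # valid goes unsubmitted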
                target = 2**256//2**32 - 1
                if len(self.recent_shares_ts_work) == 50:
                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                    target = min(target, 2**256//(hash_rate * 5))
                target = max(target, share_info['bits'].target)
                for aux_work in current_work.value['mm_chains'].itervalues():
                    target = max(target, aux_work['target'])
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
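                # check_merkle_branch recomputes the block's merkle root from the
                # generation tx hash and the branch captured at getwork time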
                merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(generate_tx)), 0, current_work2.value['merkle_branch'])
                
                getwork_time = time.time()
                merkle_branch = current_work2.value['merkle_branch']
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                ba = bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
                
                def got_response(header, request):
                    assert header['merkle_root'] == merkle_root
                    
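                    # pack the header once; both the block hash and the
                    # proof-of-work hash are derived from the same serialization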
                    packed_header = bitcoin_data.block_header_type.pack(header)
                    header_hash = bitcoin_data.hash256(packed_header)
                    pow_hash = net.PARENT.POW_FUNC(packed_header)
                    on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                    
                    try:
                        if pow_hash <= header['bits'].target or p2pool.DEBUG:
                            @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
                            def submit_block():
                                if factory.conn.value is None:
                                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Hash: %x' % (header_hash,)
                                    raise deferral.RetrySilentlyException()
                                factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                            submit_block()
                            if pow_hash <= header['bits'].target:
                                print
                                print 'GOT BLOCK FROM MINER! Passing to bitcoind! bitcoin: %x' % (header_hash,)
                                print
                                recent_blocks.append(dict(ts=time.time(), hash='%x' % (header_hash,)))
                    except:
                        log.err(None, 'Error while processing potential block:')
                    
                    for aux_work, index, hashes in mm_later:
                        try:
                            if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                                df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                    pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                                    bitcoin_data.aux_pow_type.pack(dict(
                                        merkle_tx=dict(
                                            tx=transactions[0],
                                            block_hash=header_hash,
                                            merkle_branch=merkle_branch,
                                            index=0,
                                        ),
                                        merkle_branch=bitcoin_data.calculate_merkle_branch(hashes, index),
                                        index=index,
                                        parent_block_header=header,
                                    )).encode('hex'),
                                )
                                @df.addCallback
                                def _(result):
                                    if result != (pow_hash <= aux_work['target']):
                                        print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                                    else:
                                        print 'Merged block submittal result: %s' % (result,)
                                @df.addErrback
                                def _(err):
                                    log.err(err, 'Error submitting merged block:')
                        except:
                            log.err(None, 'Error while processing merged mining POW:')
                    
                    if pow_hash <= share_info['bits'].target:
                        share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                        print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                            request.getUser(),
                            p2pool_data.format_hash(share.hash),
                            p2pool_data.format_hash(share.previous_hash),
                            time.time() - getwork_time,
                            ' DEAD ON ARRIVAL' if not on_time else '',
                        )
                        my_share_hashes.add(share.hash)
                        if not on_time:
                            my_doa_share_hashes.add(share.hash)
                        
                        tracker.add(share)
                        if not p2pool.DEBUG:
                            tracker.verified.add(share)
                        set_real_work2()
                        
                        try:
                            if pow_hash <= header['bits'].target or p2pool.DEBUG:
                                for peer in p2p_node.peers.itervalues():
                                    peer.sendShares([share])
                                shared_share_hashes.add(share.hash)
                        except:
                            log.err(None, 'Error forwarding block solution:')
                    
                    if pow_hash <= target:
                        reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
                        if request.getPassword() == vip_pass:
                            reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
                        self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                        while len(self.recent_shares_ts_work) > 50:
                            self.recent_shares_ts_work.pop(0)
                        local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
                    
                    
                    if pow_hash > target:
                        print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
                        print '    Hash:   %56x' % (pow_hash,)
                        print '    Target: %56x' % (target,)
                    
                    return on_time
                
                return ba, got_response
        
        web_root = resource.Resource()
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
        
        def get_rate():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return json.dumps(None)
            return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
            return json.dumps(res)
        
        def get_current_txouts():
            share = tracker.shares[current_work.value['best_share_hash']]
            share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
            return dict((out['script'], out['value']) for out in gentx['tx_outs'])
        
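        # scale payouts so they sum to `scale`; outputs below `trunc` are pooled
        # and awarded to a single small holder, chosen with probability
        # proportional to the amount it was owed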
        def get_current_scaled_txouts(scale, trunc=0):
            txouts = get_current_txouts()
            total = sum(txouts.itervalues())
            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
            if trunc > 0:
                total_random = 0
                random_set = set()
                for s in sorted(results, key=results.__getitem__):
                    if results[s] >= trunc:
                        break
                    total_random += results[s]
                    random_set.add(s)
                if total_random:
                    winner = math.weighted_choice((script, results[script]) for script in random_set)
                    for script in random_set:
                        del results[script]
                    results[winner] = total_random
            if sum(results.itervalues()) < int(scale):
                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
            return results
        
        def get_current_payouts():
            return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
        
        def get_patron_sendmany(this):
            try:
                if '/' in this:
                    this, trunc = this.split('/', 1)
                else:
                    trunc = '0.01'
                return json.dumps(dict(
                    (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
                    for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
                    if bitcoin_data.script2_to_address(script, net.PARENT) is not None
                ))
            except:
                return json.dumps(None)
        
        def get_global_stats():
            # averaged over last hour
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            return json.dumps(dict(
                pool_nonstale_hash_rate=nonstale_hash_rate,
                pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
                pool_stale_prop=stale_prop,
            ))
        
        def get_local_stats():
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            
            my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
            my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
            my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
            my_share_count = my_unstale_count + my_orphan_count + my_doa_count
            my_stale_count = my_orphan_count + my_doa_count
            
            my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
            
            my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
                for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
                if share.hash in my_share_hashes)
            actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
                tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
            share_att_s = my_work / actual_time
            
            miner_hash_rates = {}
            miner_dead_hash_rates = {}
            datums, dt = local_rate_monitor.get_datums_in_last()
            for datum in datums:
                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
                if datum['dead']:
                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
            
            return json.dumps(dict(
                my_hash_rates_in_last_hour=dict(
                    note="DEPRECATED",
                    nonstale=share_att_s,
                    rewarded=share_att_s/(1 - global_stale_prop),
                    actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
                ),
                my_share_counts_in_last_hour=dict(
                    shares=my_share_count,
                    unstale_shares=my_unstale_count,
                    stale_shares=my_stale_count,
                    orphan_stale_shares=my_orphan_count,
                    doa_stale_shares=my_doa_count,
                ),
                my_stale_proportions_in_last_hour=dict(
                    stale=my_stale_prop,
                    orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                    dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
                ),
                miner_hash_rates=miner_hash_rates,
                miner_dead_hash_rates=miner_dead_hash_rates,
            ))
        
        def get_peer_addresses():
            return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
        
        def get_uptime():
            return json.dumps(time.time() - start_time)
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type, *fields):
                resource.Resource.__init__(self)
                self.func, self.mime_type, self.fields = func, mime_type, fields
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                request.setHeader('Access-Control-Allow-Origin', '*')
                return self.func(*(request.args[field][0] for field in self.fields))
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
        web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        new_root = resource.Resource()
        web_root.putChild('web', new_root)
        
        stat_log = []
        if os.path.exists(os.path.join(datadir_path, 'stats')):
            try:
                with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                    stat_log = json.loads(f.read())
            except:
                log.err(None, 'Error loading stats:')
        def update_stat_log():
            while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
                stat_log.pop(0)
            
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
            
            miner_hash_rates = {}
            miner_dead_hash_rates = {}
            datums, dt = local_rate_monitor.get_datums_in_last()
            for datum in datums:
                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
                if datum['dead']:
                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
            
            stat_log.append(dict(
                time=time.time(),
                pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
                pool_stale_prop=global_stale_prop,
                local_hash_rates=miner_hash_rates,
                local_dead_hash_rates=miner_dead_hash_rates,
                shares=shares,
                stale_shares=stale_orphan_shares + stale_doa_shares,
                stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
                current_payout=get_current_txouts().get(my_script, 0)*1e-8,
            ))
            
            with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
                f.write(json.dumps(stat_log))
        task.LoopingCall(update_stat_log).start(5*60)
        new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
        
        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
        web_root.putChild('graphs', grapher.get_resource())
        def add_point():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return
            nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
            poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
            grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
        task.LoopingCall(add_point).start(100)
        
        def attempt_listen():
            try:
                reactor.listenTCP(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
            except error.CannotListenError, e:
                print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
                reactor.callLater(1, attempt_listen)
            else:
                with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
                    pass
        attempt_listen()
        
        print '    ...success!'
        print
        
        
        @defer.inlineCallbacks
        def work_poller():
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
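                # wake on either a new-block notification from bitcoind or a
                # 15 second timeout, whichever fires first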
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
        
        
        # done!
        print 'Started successfully!'
        print
        
        
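        # watchdog: re-arm a 30 second SIGALRM from a reactor task every second;
        # if the reactor stalls, the alarm fires and a stack trace is dumped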
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool'
                def lineReceived(self, line):
                    print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join('#p2pool')
                    self.watch_id = tracker.verified.added.watch(self._new_share)
                    self.announced_hashes = set()
                def _new_share(self, share):
                    if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes and abs(share.timestamp - time.time()) < 10*60:
                        self.announced_hashes.add(share.header_hash)
                        self.say('#p2pool', '\x02BLOCK FOUND by %s! http://blockexplorer.com/block/%064x' % (bitcoin_data.script2_to_address(share.share_data['new_script'], net.PARENT), share.header_hash))
                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    if time.time() > current_work2.value['last_update'] + 60:
                        print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - current_work2.value['last_update']),)
                    
                    height = tracker.get_height(current_work.value['best_share_hash'])
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(tracker.verified.shares),
                        len(tracker.shares),
                        len(p2p_node.peers),
                        sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    
                    datums, dt = local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work']/dt for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                        math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].target / my_att_s) if my_att_s else '???',
                    )
                    
                    if height > 2:
                        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
                        
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            get_current_txouts().get(my_script, 0)*1e-8, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            math.format_dt(2**256 / current_work.value['bits'].target / real_att_s),
                        )
                    
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()

979 def run():
980     class FixedArgumentParser(argparse.ArgumentParser):
981         def _read_args_from_files(self, arg_strings):
982             # expand arguments referencing files
983             new_arg_strings = []
984             for arg_string in arg_strings:
985                 
986                 # for regular arguments, just add them back into the list
987                 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
988                     new_arg_strings.append(arg_string)
989                 
990                 # replace arguments referencing files with the file content
991                 else:
992                     try:
993                         args_file = open(arg_string[1:])
994                         try:
995                             arg_strings = []
996                             for arg_line in args_file.read().splitlines():
997                                 for arg in self.convert_arg_line_to_args(arg_line):
998                                     arg_strings.append(arg)
999                             arg_strings = self._read_args_from_files(arg_strings)
1000                             new_arg_strings.extend(arg_strings)
1001                         finally:
1002                             args_file.close()
1003                     except IOError:
1004                         err = sys.exc_info()[1]
1005                         self.error(str(err))
1006             
1007             # return the modified argument list
1008             return new_arg_strings
1009         
1010         def convert_arg_line_to_args(self, arg_line):
1011             return [arg for arg in arg_line.split() if arg.strip()] # split on any whitespace so one line can hold several options
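    # Usage sketch for the @file expansion implemented above (file name and
    # contents hypothetical): with fromfile_prefix_chars='@', an invocation
    # such as
    #     python run_p2pool.py @p2pool.conf
    # reads p2pool.conf, splits every line on whitespace so a single line may
    # carry several options, and splices the tokens into argv; nested @file
    # references are expanded recursively by _read_args_from_files. Example
    # file contents:
    #     --net bitcoin
    #     --fee 1.0
    #     rpcuser rpcpass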
1012     
1013     
1014     realnets=dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
1015     
1016     parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
1017     parser.add_argument('--version', action='version', version=p2pool.__version__)
1018     parser.add_argument('--net',
1019         help='use specified network (default: bitcoin)',
1020         action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
1021     parser.add_argument('--testnet',
1022         help='''use the network's testnet''',
1023         action='store_const', const=True, default=False, dest='testnet')
1024     parser.add_argument('--debug',
1025         help='enable debugging mode',
1026         action='store_const', const=True, default=False, dest='debug')
1027     parser.add_argument('-a', '--address',
1028         help='generate payouts to this address (default: <address requested from bitcoind>)',
1029         type=str, action='store', default=None, dest='address')
1030     parser.add_argument('--datadir',
1031         help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
1032         type=str, action='store', default=None, dest='datadir')
1033     parser.add_argument('--logfile',
1034         help='''log to this file (default: data/<NET>/log)''',
1035         type=str, action='store', default=None, dest='logfile')
1036     parser.add_argument('--merged',
1037         help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
1038         type=str, action='append', default=[], dest='merged_urls')
1039     parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
1040         help='donate this percentage of work to the author of p2pool (default: 0.5)',
1041         type=float, action='store', default=0.5, dest='donation_percentage')
1042     parser.add_argument('--irc-announce',
1043         help='announce any blocks found on irc://irc.freenode.net/#p2pool',
1044         action='store_true', default=False, dest='irc_announce')
1045     
1046     p2pool_group = parser.add_argument_group('p2pool interface')
1047     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
1048         help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
1049         type=int, action='store', default=None, dest='p2pool_port')
1050     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
1051         help='connect to existing p2pool node at ADDR listening on port PORT (PORT defaults to the default p2pool P2P port) in addition to built-in addresses',
1052         type=str, action='append', default=[], dest='p2pool_nodes')
1053     parser.add_argument('--disable-upnp',
1054         help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
1055         action='store_false', default=True, dest='upnp')
1056     
1057     worker_group = parser.add_argument_group('worker interface')
1058     worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
1059         help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
1060         type=str, action='store', default=None, dest='worker_endpoint')
1061     worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
1062         help='''charge workers who mine to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee for mining on your p2pool instance. The amount is displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
1063         type=float, action='store', default=0, dest='worker_fee')
1064     
1065     bitcoind_group = parser.add_argument_group('bitcoind interface')
1066     bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
1067         help='connect to this address (default: 127.0.0.1)',
1068         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
1069     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
1070         help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
1071         type=int, action='store', default=None, dest='bitcoind_rpc_port')
1072     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
1073         help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
1074         type=int, action='store', default=None, dest='bitcoind_p2p_port')
1075     
1076     bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
1077         help='bitcoind RPC interface username, then password, space-separated (if only one is provided, the username defaults to empty; if neither is provided, P2Pool reads them from bitcoin.conf)',
1078         type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
1079     
1080     args = parser.parse_args()
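    # A typical full invocation given the flags declared above (all values
    # illustrative, not defaults):
    #     python run_p2pool.py --net bitcoin --fee 1.0 -w 9332 rpcuser rpcpass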
1081     
1082     if args.debug:
1083         p2pool.DEBUG = True
1084     
1085     net_name = args.net_name + ('_testnet' if args.testnet else '')
1086     net = networks.nets[net_name]
1087     
1088     datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
1089     if not os.path.exists(datadir_path):
1090         os.makedirs(datadir_path)
1091     
1092     if len(args.bitcoind_rpc_userpass) > 2:
1093         parser.error('a maximum of two arguments is allowed')
1094     args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
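    # The left-padding slice above lets zero, one, or two positional values
    # unpack cleanly:
    #     ([None, None] + [])[-2:]         -> [None, None]  (read bitcoin.conf)
    #     ([None, None] + ['p'])[-2:]      -> [None, 'p']   (username left empty)
    #     ([None, None] + ['u', 'p'])[-2:] -> ['u', 'p']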
1095     
1096     if args.bitcoind_rpc_password is None:
1097         if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
1098             parser.error('This network has no configuration file function. Manually enter your RPC password.')
1099         conf_path = net.PARENT.CONF_FILE_FUNC()
1100         if not os.path.exists(conf_path):
1101             parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
1102                 '''If you haven't yet created a configuration file, you should create one at %s with the text:\r\n'''
1103                 '''\r\n'''
1104                 '''server=1\r\n'''
1105                 '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
1106         with open(conf_path, 'rb') as f:
1107             cp = ConfigParser.RawConfigParser()
1108             cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
1109             for conf_name, var_name, var_type in [
1110                 ('rpcuser', 'bitcoind_rpc_username', str),
1111                 ('rpcpassword', 'bitcoind_rpc_password', str),
1112                 ('rpcport', 'bitcoind_rpc_port', int),
1113                 ('port', 'bitcoind_p2p_port', int),
1114             ]:
1115                 if getattr(args, var_name) is None and cp.has_option('x', conf_name):
1116                     setattr(args, var_name, var_type(cp.get('x', conf_name)))
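    # Sketch of the bitcoin.conf fallback above (example values hypothetical):
    # prepending a dummy '[x]' section header lets RawConfigParser digest the
    # section-less file. Given, say,
    #     server=1
    #     rpcuser=alice
    #     rpcpassword=hunter2
    #     rpcport=8332
    # each recognized setting fills in the matching args field, but only where
    # the command line did not already supply a value.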
1117     
1118     if args.bitcoind_rpc_username is None:
1119         args.bitcoind_rpc_username = ''
1120     
1121     if args.bitcoind_rpc_port is None:
1122         args.bitcoind_rpc_port = net.PARENT.RPC_PORT
1123     
1124     if args.bitcoind_p2p_port is None:
1125         args.bitcoind_p2p_port = net.PARENT.P2P_PORT
1126     
1127     if args.p2pool_port is None:
1128         args.p2pool_port = net.P2P_PORT
1129     
1130     if args.worker_endpoint is None:
1131         worker_endpoint = '', net.WORKER_PORT
1132     elif ':' not in args.worker_endpoint:
1133         worker_endpoint = '', int(args.worker_endpoint)
1134     else:
1135         addr, port = args.worker_endpoint.rsplit(':', 1)
1136         worker_endpoint = addr, int(port)
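    # Examples for the three branches above:
    #     (no -w given)      -> ('', net.WORKER_PORT)   listen on all interfaces
    #     '9332'             -> ('', 9332)
    #     '127.0.0.1:9332'   -> ('127.0.0.1', 9332)
    # rsplit(':', 1) keeps everything before the last colon as the address.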
1137     
1138     if args.address is not None:
1139         try:
1140             args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
1141         except Exception, e:
1142             parser.error('error parsing address: ' + repr(e))
1143     else:
1144         args.pubkey_hash = None
1145     
1146     def separate_url(url):
1147         s = urlparse.urlsplit(url)
1148         if '@' not in s.netloc:
1149             parser.error('merged url netloc must contain an "@"')
1150         userpass, new_netloc = s.netloc.rsplit('@', 1)
1151         return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
1152     merged_urls = map(separate_url, args.merged_urls)
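    # separate_url strips the credentials out of a merged-mining URL so they
    # can be handed to the JSON-RPC proxy separately, e.g.
    #     'http://ncuser:ncpass@127.0.0.1:10332/'
    #         -> ('http://127.0.0.1:10332/', 'ncuser:ncpass')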
1153     
1154     if args.logfile is None:
1155         args.logfile = os.path.join(datadir_path, 'log')
1156     
1157     logfile = logging.LogFile(args.logfile)
1158     pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
1159     sys.stdout = logging.AbortPipe(pipe)
1160     sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
1161     if hasattr(signal, "SIGUSR1"):
1162         def sigusr1(signum, frame):
1163             print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
1164             logfile.reopen()
1165             print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
1166         signal.signal(signal.SIGUSR1, sigusr1)
1167     task.LoopingCall(logfile.reopen).start(5)
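    # From here on stdout and stderr flow through the pipe chain built above:
    # every write is timestamped, teed to both the real stderr and the
    # logfile, and stderr output gets a '> ' prefix so it stands out in the
    # shared log. Reopening the logfile every 5 seconds (and on SIGUSR1)
    # lets external logrotate-style tools rotate the file without p2pool
    # holding the old file handle open indefinitely.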
1168     
1169     reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
1170     reactor.run()