don't IRC-announce shares more than 10 minutes in the past or future
p2pool/main.py
from __future__ import division

import ConfigParser
import StringIO
import argparse
import os
import random
import struct
import sys
import time
import json
import signal
import traceback
import urlparse

from twisted.internet import defer, error, reactor, protocol, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
from . import p2p, networks, graphs
import p2pool, p2pool.data as p2pool_data

@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    try:
        work = yield bitcoind.rpc_getmemorypool()
    except jsonrpc.Error, e:
        if e.code == -32601: # Method not found
            print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
            raise deferral.RetrySilentlyException()
        raise
    packed_transactions = [x.decode('hex') for x in work['transactions']]
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
        merkle_branch=bitcoin_data.calculate_merkle_branch([0] + map(bitcoin_data.hash256, packed_transactions), 0),
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
    ))
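# Annotation: getwork() normalizes bitcoind's getmemorypool response.
# merkle_branch is computed with a zero placeholder at index 0, reserving the
# slot for the coinbase transaction that is generated later, per request.
# 'bits' arrives either as a hex string (newer bitcoinds) or as an integer;
# both are wrapped in a FloatingInteger holding the compact target encoding,
# e.g. (standard compact form, shown for illustration only):
#   0x1b0404cb  ->  target = 0x0404cb * 2**(8*(0x1b - 3))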

@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
        if not good:
            print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        print 'Determining payout script...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            
            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            
            with open(address_path, 'wb') as f:
                f.write(address)
            
            my_script = bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net.PARENT))
        else:
            print '    ...Computing payout script from provided address...'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success! Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
        print
        
        my_share_hashes = set()
        my_doa_share_hashes = set()
        
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        recent_blocks = []
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_merged_work = variable.Variable({})
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                merkle_branch=work['merkle_branch'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # second set first because everything hooks on the first
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                bits=work['bits'],
                coinbaseflags=work['coinbaseflags'],
            ))
        
        if '\ngetblock ' in (yield bitcoind.rpc_help()):
            height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
            def get_height_rel_highest(block_hash):
                return height_cacher.call_now(block_hash, 0) - height_cacher.call_now(pre_current_work.value['previous_block'], 1000000000)
        else:
            get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory).get_height_rel_highest
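        # Annotation: get_height_rel_highest(h) returns h's height relative to the
        # current best block. The cache's miss defaults (0 for the queried hash,
        # 1000000000 for the tip) make a never-seen hash compare as far in the
        # past until the getblock RPC actually answers.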
        
        def set_real_work2():
            best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['mm_chains'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
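        # Annotation: the re-request throttle above only allows another getshares
        # for a hash when t falls outside
        # (last_request_time - 5, last_request_time + 10 * 1.5**count), so the
        # retry interval grows geometrically (10 * 1.5**count seconds: 15s after
        # the first request, 22.5s after the second, and so on).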
        pre_current_work.changed.watch(lambda _: set_real_work2())
        
        print 'Initializing work...'
        yield set_real_work1()
        print '    ...success!'
        print
        
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        
        
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.Proxy(merged_url, (merged_userpass,))
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
                pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
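        # Annotation: each merged chain is polled about once per second; the
        # getauxblock result (hash/chainid/target, per the namecoin-style merged
        # mining RPC) is keyed by chainid in pre_merged_work so several aux
        # chains can be mined simultaneously.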
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # setup p2p logic and join p2pool network
        
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
                parents = min(parents, 1000//len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                peer.sendShares(shares)
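        # Annotation: handle_get_shares clamps parents to 1000//len(hashes), so a
        # single request can never pull much more than ~1000 shares no matter how
        # many head hashes the peer lists.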
        
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker))
                else:
                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash)
                print
                recent_blocks.append({ 'ts': share.timestamp, 'hash': '%x' % (share.header_hash) })
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
            try:
                addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
            except:
                print >>sys.stderr, "error reading addrs"
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
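        # Annotation: addrs.txt holds one repr()'d (addr, info) pair per line,
        # matching the bootstrap entries created above, e.g.
        #   (('1.2.3.4', 9333), (0, <first_seen>, <last_seen>))
        # (field meanings inferred from the (0, time.time(), time.time()) default).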
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        p2p_node = Node(
            best_share_hash_func=lambda: current_work.value['best_share_hash'],
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
        )
        p2p_node.start()
        
        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs.txt'), 'w') as f:
                f.writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
        task.LoopingCall(save_addrs).start(60)
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        start_time = time.time()
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
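        # Annotation: the UPnP mapping is refreshed at exponentially-distributed
        # intervals with mean 120 seconds (1/120 is true division here), jittering
        # renewals rather than hitting the router on a fixed schedule.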
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        
        if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
            with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
                vip_pass = f.read().strip('\r\n')
        else:
            vip_pass = '%016x' % (random.randrange(2**64),)
            with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
                f.write(vip_pass)
        print '    Worker password:', vip_pass, '(only required for generating graphs)'
        
        # setup worker logic
        
        removed_unstales_var = variable.Variable((0, 0, 0))
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
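        # Annotation: stale_info encoding used throughout this file: 0 = on time,
        # 253 = an orphan was observed, 254 = a share was dead on arrival (DOA).
        # The counters above track how many of our own stale-marked shares have
        # been pruned off the far end of the tracked chain.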
        
        removed_doa_unstales_var = variable.Variable(0)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
        
        def get_stale_counts():
            '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
            my_shares = len(my_share_hashes)
            my_doa_shares = len(my_doa_share_hashes)
            delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
            my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
            my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
            orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
            doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
            
            my_shares_not_in_chain = my_shares - my_shares_in_chain
            my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
            
            return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
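        # Annotation, worked example with illustrative numbers: 100 local shares
        # of which 90 are in the chain, and 7 local DOA shares of which 3 are in
        # the chain, give 10 shares not in the chain, 4 of them DOA, so this
        # returns ((10 - 4, 4), 100, ...) = ((6, 4), 100, ...).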
        
        
        recent_shares_ts_work2 = []
        
        class WorkerBridge(worker_interface.WorkerBridge):
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                self.recent_shares_ts_work = []
            
            def _get_payout_script_from_username(self, user):
                if user is None:
                    return None
                try:
                    pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                except: # XXX blah
                    return None
                return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
            
            def preprocess_request(self, request):
                payout_script = self._get_payout_script_from_username(request.getUser())
                if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                    payout_script = my_script
                return payout_script,
            
            def get_work(self, payout_script):
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
                if current_work.value['mm_chains']:
                    tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
                    mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
                    mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                        merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                        size=size,
                        nonce=0,
                    ))
                    mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
                else:
                    mm_data = ''
                    mm_later = []
                
                share_info, generate_tx = p2pool_data.generate_transaction(
                    tracker=tracker,
                    share_data=dict(
                        previous_share_hash=current_work.value['best_share_hash'],
                        coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
                        nonce=struct.pack('<Q', random.randrange(2**64)),
                        new_script=payout_script,
                        subsidy=current_work2.value['subsidy'],
                        donation=math.perfect_round(65535*args.donation_percentage/100),
                        stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                            253 if orphans > orphans_recorded_in_chain else
                            254 if doas > doas_recorded_in_chain else
                            0
                        )(*get_stale_counts()),
                    ),
                    block_target=current_work.value['bits'].target,
                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                    net=net,
                )
                
                target = 2**256//2**32 - 1
                if len(self.recent_shares_ts_work) == 50:
                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                    target = min(target, 2**256//(hash_rate * 5))
                target = max(target, share_info['bits'].target)
                for aux_work in current_work.value['mm_chains'].itervalues():
                    target = max(target, aux_work['target'])
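                # Annotation: pseudoshare target selection. Start at difficulty 1
                # (2**256//2**32 - 1), tighten so a miner at the observed hash rate
                # submits about one pseudoshare per 5 seconds (expected seconds per
                # pseudoshare = 2**256/target/hash_rate, hence 2**256//(hash_rate*5)),
                # then loosen so it is never harder than the real share target or any
                # merged-mining target; otherwise qualifying solutions could be missed.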
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
                merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(generate_tx)), 0, current_work2.value['merkle_branch'])
                
                getwork_time = time.time()
                merkle_branch = current_work2.value['merkle_branch']
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                ba = bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
                
                def got_response(header, request):
                    assert header['merkle_root'] == merkle_root
                    
                    pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
                    on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                    
                    try:
                        if pow_hash <= header['bits'].target or p2pool.DEBUG:
                            @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
                            def submit_block():
                                if factory.conn.value is None:
                                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Hash: %x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),)
                                    raise deferral.RetrySilentlyException()
                                factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                            submit_block()
                            if pow_hash <= header['bits'].target:
                                print
                                print 'GOT BLOCK FROM MINER! Passing to bitcoind! bitcoin: %x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),)
                                print
                                recent_blocks.append({ 'ts': time.time(), 'hash': '%x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),) })
                    except:
                        log.err(None, 'Error while processing potential block:')
                    
                    for aux_work, index, hashes in mm_later:
                        try:
                            if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                                df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                    pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                                    bitcoin_data.aux_pow_type.pack(dict(
                                        merkle_tx=dict(
                                            tx=transactions[0],
                                            block_hash=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),
                                            merkle_branch=merkle_branch,
                                            index=0,
                                        ),
                                        merkle_branch=bitcoin_data.calculate_merkle_branch(hashes, index),
                                        index=index,
                                        parent_block_header=header,
                                    )).encode('hex'),
                                )
                                @df.addCallback
                                def _(result):
                                    if result != (pow_hash <= aux_work['target']):
                                        print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                                    else:
                                        print 'Merged block submittal result: %s' % (result,)
                                @df.addErrback
                                def _(err):
                                    log.err(err, 'Error submitting merged block:')
                        except:
                            log.err(None, 'Error while processing merged mining POW:')
                    
                    if pow_hash <= share_info['bits'].target:
                        share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                        print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                            request.getUser(),
                            p2pool_data.format_hash(share.hash),
                            p2pool_data.format_hash(share.previous_hash),
                            time.time() - getwork_time,
                            ' DEAD ON ARRIVAL' if not on_time else '',
                        )
                        my_share_hashes.add(share.hash)
                        if not on_time:
                            my_doa_share_hashes.add(share.hash)
                        
                        tracker.add(share)
                        if not p2pool.DEBUG:
                            tracker.verified.add(share)
                        set_real_work2()
                        
                        try:
                            if pow_hash <= header['bits'].target or p2pool.DEBUG:
                                for peer in p2p_node.peers.itervalues():
                                    peer.sendShares([share])
                                shared_share_hashes.add(share.hash)
                        except:
                            log.err(None, 'Error forwarding block solution:')
                    
                    if pow_hash <= target:
                        reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
                        if request.getPassword() == vip_pass:
                            reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
                        self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                        while len(self.recent_shares_ts_work) > 50:
                            self.recent_shares_ts_work.pop(0)
                        recent_shares_ts_work2.append((time.time(), bitcoin_data.target_to_average_attempts(target), not on_time, request.getUser()))
                    
                    
                    if pow_hash > target:
                        print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
                        print '    Hash:   %56x' % (pow_hash,)
                        print '    Target: %56x' % (target,)
                    
                    return on_time
                
                return ba, got_response
        
        web_root = resource.Resource()
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
        
        def get_rate():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return json.dumps(None)
            return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
            return json.dumps(res)
        
        def get_current_txouts():
            share = tracker.shares[current_work.value['best_share_hash']]
            share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
            return dict((out['script'], out['value']) for out in gentx['tx_outs'])
        
        def get_current_scaled_txouts(scale, trunc=0):
            txouts = get_current_txouts()
            total = sum(txouts.itervalues())
            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
            if trunc > 0:
                total_random = 0
                random_set = set()
                for s in sorted(results, key=results.__getitem__):
                    if results[s] >= trunc:
                        break
                    total_random += results[s]
                    random_set.add(s)
                if total_random:
                    winner = math.weighted_choice((script, results[script]) for script in random_set)
                    for script in random_set:
                        del results[script]
                    results[winner] = total_random
            if sum(results.itervalues()) < int(scale):
                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
            return results
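        # Annotation: the trunc pass pools all payouts scaled below `trunc` and
        # awards the pooled sum to one winner picked with probability proportional
        # to each pooled payout, leaving every miner's expected payout unchanged
        # while avoiding dust outputs; any rounding shortfall versus `scale` then
        # goes to a single weighted pick.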
        
        def get_current_payouts():
            return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
        
        def get_patron_sendmany(this):
            try:
                if '/' in this:
                    this, trunc = this.split('/', 1)
                else:
                    trunc = '0.01'
                return json.dumps(dict(
                    (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
                    for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
                    if bitcoin_data.script2_to_address(script, net.PARENT) is not None
                ))
            except:
                return json.dumps(None)
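        # Annotation: served below as /patron_sendmany?total=<btc>[/<trunc>] (the
        # 'total' field is read from the query string by WebInterface); e.g.
        # total=50/0.01 would scale payouts to 50 BTC, pooling outputs under
        # 0.01 BTC -- values illustrative.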
        
        def get_global_stats():
            # averaged over last hour
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            return json.dumps(dict(
                pool_nonstale_hash_rate=nonstale_hash_rate,
                pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
                pool_stale_prop=stale_prop,
            ))
        
        def get_local_stats():
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            
            my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
            my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
            my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
            my_share_count = my_unstale_count + my_orphan_count + my_doa_count
            my_stale_count = my_orphan_count + my_doa_count
            
            my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
            
            my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
                for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
                if share.hash in my_share_hashes)
            actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
                tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
            share_att_s = my_work / actual_time
            
            miner_hash_rates = {}
            miner_dead_hash_rates = {}
            for ts, work, dead, user in recent_shares_ts_work2:
                miner_hash_rates[user] = miner_hash_rates.get(user, 0) + work/600
                if dead:
                    miner_dead_hash_rates[user] = miner_dead_hash_rates.get(user, 0) + work/600
            
            return json.dumps(dict(
                my_hash_rates_in_last_hour=dict(
                    note="DEPRECATED",
                    nonstale=share_att_s,
                    rewarded=share_att_s/(1 - global_stale_prop),
                    actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
                ),
                my_share_counts_in_last_hour=dict(
                    shares=my_share_count,
                    unstale_shares=my_unstale_count,
                    stale_shares=my_stale_count,
                    orphan_stale_shares=my_orphan_count,
                    doa_stale_shares=my_doa_count,
                ),
                my_stale_proportions_in_last_hour=dict(
                    stale=my_stale_prop,
                    orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                    dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
                ),
                miner_hash_rates=miner_hash_rates,
                miner_dead_hash_rates=miner_dead_hash_rates,
            ))
        
        def get_peer_addresses():
            return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
        
        def get_uptime():
            return json.dumps(time.time() - start_time)
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type, *fields):
                resource.Resource.__init__(self) # initialize children dict so traversal (e.g. trailing slashes) can't raise
                self.func, self.mime_type, self.fields = func, mime_type, fields
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                request.setHeader('Access-Control-Allow-Origin', '*')
                return self.func(*(request.args[field][0] for field in self.fields))
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
        web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
        web_root.putChild('graphs', grapher.get_resource())
        def add_point():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return
            nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
            poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
            grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
        task.LoopingCall(add_point).start(100)
        
        def attempt_listen():
            try:
                reactor.listenTCP(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
            except error.CannotListenError, e:
                print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
                reactor.callLater(1, attempt_listen)
            else:
                with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
                    pass
        attempt_listen()
        
        print '    ...success!'
        print
        
        
        @defer.inlineCallbacks
        def work_poller():
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
        
        
        # done!
        print 'Started successfully!'
        print
        
        
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
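        # Annotation: watchdog -- signal.alarm(30) is re-armed every second from
        # the reactor loop, so SIGALRM only fires if the event loop stalls for a
        # full 30 seconds, at which point the handler dumps the stack of wherever
        # it hung.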
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool'
                def lineReceived(self, line):
                    print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join('#p2pool')
                    self.watch_id = tracker.verified.added.watch(self._new_share)
                    self.announced_hashes = set()
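                # Annotation: announce only confirmed blocks (pow_hash meets the
                # block target), at most once per header hash, and -- per this
                # commit -- only when the share's timestamp is within 10 minutes of
                # local time, so replayed or clock-skewed shares are never
                # broadcast to IRC.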
                def _new_share(self, share):
                    if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes and abs(share.timestamp - time.time()) < 10*60:
                        self.announced_hashes.add(share.header_hash)
                        self.say('#p2pool', '\x02BLOCK FOUND by %s! http://blockexplorer.com/block/%064x' % (bitcoin_data.script2_to_address(share.share_data['new_script'], net.PARENT), share.header_hash))
                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
        @defer.inlineCallbacks
        def status_thread():
            average_period = 600
            first_pseudoshare_time = None
            
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    if time.time() > current_work2.value['last_update'] + 60:
                        print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - current_work2.value['last_update']),)
                    
                    height = tracker.get_height(current_work.value['best_share_hash'])
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(tracker.verified.shares),
                        len(tracker.shares),
                        len(p2p_node.peers),
                        sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    
                    if first_pseudoshare_time is None and recent_shares_ts_work2:
                        first_pseudoshare_time = recent_shares_ts_work2[0][0]
                    while recent_shares_ts_work2 and recent_shares_ts_work2[0][0] < time.time() - average_period:
                        recent_shares_ts_work2.pop(0)
                    my_att_s = sum(work for ts, work, dead, user in recent_shares_ts_work2)/min(time.time() - first_pseudoshare_time, average_period) if first_pseudoshare_time is not None else 0
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(min(time.time() - first_pseudoshare_time, average_period) if first_pseudoshare_time is not None else 0),
                        math.format_binomial_conf(sum(1 for ts, work, dead, user in recent_shares_ts_work2 if dead), len(recent_shares_ts_work2), 0.95),
                        math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].target / my_att_s) if my_att_s else '???',
                    )
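                    # Annotation: expected time to share = 2**256/target/hashrate;
                    # a hash is a uniform 256-bit value, so 2**256/target is the
                    # average number of attempts per share, divided by
                    # attempts/second to give seconds.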
                    
                    if height > 2:
                        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
                        
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            get_current_txouts().get(my_script, 0)*1e-8, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            math.format_dt(2**256 / current_work.value['bits'].target / real_att_s),
                        )
                    
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()

def run():
    class FixedArgumentParser(argparse.ArgumentParser):
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            return [arg for arg in arg_line.split() if arg.strip()]
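        # Annotation: combined with fromfile_prefix_chars='@' below, this lets
        # users keep options in a file and run e.g. `run_p2pool.py @p2pool.conf`,
        # one or more whitespace-separated arguments per line (file name
        # illustrative).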
967     
968     
969     realnets=dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
970     
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from bitcoind>)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--datadir',
        help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
        type=str, action='store', default=None, dest='datadir')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged',
        help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--merged-url',
        help='DEPRECATED, use --merged',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='DEPRECATED, use --merged',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work to the author of p2pool (default: 0.5)',
        type=float, action='store', default=0.5, dest='donation_percentage')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='''connect to an existing p2pool node at ADDR listening on port PORT (PORT defaults to the network's p2pool P2P port), in addition to the built-in addresses''',
        type=str, action='append', default=[], dest='p2pool_nodes')
    p2pool_group.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
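    # Note the inversion: --disable-upnp uses store_false on dest 'upnp'
    # (default True), so UPnP forwarding is attempted unless the flag is given.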
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='[ADDR:]PORT',
        help='listen for RPC connections from miners on PORT, optionally bound to interface ADDR (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str, action='store', default=None, dest='worker_endpoint')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers who mine to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee for mining on your p2pool instance. The fee charged is displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (if only one is given, it is taken as the password and the username defaults to empty; if neither is given, both are read from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
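    # Illustrative invocations (not executed here): 'run_p2pool.py myuser mypass'
    # supplies both RPC credentials; 'run_p2pool.py mypass' supplies only the
    # password (username becomes ''); with neither, bitcoin.conf is consulted.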
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
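    # e.g. '--net bitcoin --testnet' selects networks.nets['bitcoin_testnet']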
    
    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('at most two arguments (username and password) are allowed')
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
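    # ([None, None] + userpass)[-2:] pads from the left: [] -> (None, None),
    # ['pass'] -> (None, 'pass'), ['user', 'pass'] -> ('user', 'pass')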
    
    if args.bitcoind_rpc_password is None:
        if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
            parser.error('This network has no configuration file function. Manually enter your RPC password.')
        conf_path = net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you haven't created a configuration file yet, create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
        with open(conf_path, 'rb') as f:
            cp = ConfigParser.RawConfigParser()
            cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
            for conf_name, var_name, var_type in [
                ('rpcuser', 'bitcoind_rpc_username', str),
                ('rpcpassword', 'bitcoind_rpc_password', str),
                ('rpcport', 'bitcoind_rpc_port', int),
                ('port', 'bitcoind_p2p_port', int),
            ]:
                if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                    setattr(args, var_name, var_type(cp.get('x', conf_name)))
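            # the dummy '[x]' section header is prepended because ConfigParser
            # requires a section and bitcoin.conf has none. Illustrative example
            # (not executed here): a conf file containing
            #     rpcuser=alice
            #     rpcpassword=hunter2
            #     rpcport=8332
            # fills in whichever of these settings were not given on the
            # command line.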
    
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)
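    # Illustrative parses (not executed here): '-w 9332' -> ('', 9332), i.e.
    # listen on all interfaces; '-w 127.0.0.1:9332' -> ('127.0.0.1', 9332).
    # rsplit(':', 1) splits on the last colon, keeping everything before it
    # as the address.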
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    def separate_url(url):
        s = urlparse.urlsplit(url)
        if '@' not in s.netloc:
            parser.error('merged url netloc must contain an "@"')
        userpass, new_netloc = s.netloc.rsplit('@', 1)
        return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
    merged_urls = map(separate_url, args.merged_urls)
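    # Illustrative example (not executed here):
    # separate_url('http://ncuser:ncpass@127.0.0.1:10332/') returns
    # ('http://127.0.0.1:10332/', 'ncuser:ncpass'); rsplit('@', 1) uses the
    # last '@', so passwords containing '@' still parse correctly.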
    
    if args.merged_url is not None or args.merged_userpass is not None:
        print '--merged-url and --merged-userpass are deprecated! Use --merged http://USER:PASS@HOST:PORT/ instead!'
        print 'Pausing 10 seconds...'
        time.sleep(10)
        
        if args.merged_url is None or args.merged_userpass is None:
            parser.error('must specify both --merged-url and --merged-userpass')
        
        merged_urls = merged_urls + [(args.merged_url, args.merged_userpass)]
    
    
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
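    # From here on everything printed goes through a timestamping tee: each
    # line is written to both the real stderr and the logfile, and captured
    # stderr output is additionally prefixed with '> '.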
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
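    # The logfile is also reopened every 5 seconds, so it can be rotated out
    # from under a running instance even on platforms without SIGUSR1 (e.g.
    # 'mv log log.old'; a fresh log file appears within seconds).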
    
    reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
    reactor.run()