added miner_hash_rate and miner_dead_hash_rate to /local_stats, using pseudoshares...
[p2pool.git] / p2pool / main.py
1 from __future__ import division
2
3 import ConfigParser
4 import StringIO
5 import argparse
6 import os
7 import random
8 import struct
9 import sys
10 import time
11 import json
12 import signal
13 import traceback
14 import urlparse
15
16 from twisted.internet import defer, error, reactor, protocol, task
17 from twisted.web import server, resource
18 from twisted.python import log
19 from nattraverso import portmapper, ipdiscover
20
21 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
22 from bitcoin import worker_interface
23 from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
24 from . import p2p, networks, graphs
25 import p2pool, p2pool.data as p2pool_data
26
27 @deferral.retry('Error getting work from bitcoind:', 3)
28 @defer.inlineCallbacks
29 def getwork(bitcoind):
30     try:
31         work = yield bitcoind.rpc_getmemorypool()
32     except jsonrpc.Error, e:
33         if e.code == -32601: # Method not found
34             print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
35             raise deferral.RetrySilentlyException()
36         raise
37     packed_transactions = [x.decode('hex') for x in work['transactions']]
38     defer.returnValue(dict(
39         version=work['version'],
40         previous_block_hash=int(work['previousblockhash'], 16),
41         transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
42         merkle_branch=bitcoin_data.calculate_merkle_branch([0] + map(bitcoin_data.hash256, packed_transactions), 0),
43         subsidy=work['coinbasevalue'],
44         time=work['time'],
45         bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
46         coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
47     ))
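# Note: the merkle branch above is computed against a 0 placeholder at index 0
# because the coinbase transaction doesn't exist yet; WorkerBridge.get_work
# later packs its own generate_tx and rebuilds the root with
# bitcoin_data.check_merkle_branch.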
48
49 @defer.inlineCallbacks
50 def main(args, net, datadir_path, merged_urls):
51     try:
52         print 'p2pool (version %s)' % (p2pool.__version__,)
53         print
54         try:
55             from . import draw
56         except ImportError:
57             draw = None
58             print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
59             print
60         
61         # connect to bitcoind over JSON-RPC and do initial getmemorypool
62         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
63         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
64         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
65         good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
66         if not good:
67             print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
68             return
69         temp_work = yield getwork(bitcoind)
70         print '    ...success!'
71         print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
72         print
73         
74         # connect to bitcoind over bitcoin-p2p
75         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
76         factory = bitcoin_p2p.ClientFactory(net.PARENT)
77         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
78         yield factory.getProtocol() # waits until handshake is successful
79         print '    ...success!'
80         print
81         
82         print 'Determining payout script...'
83         if args.pubkey_hash is None:
84             address_path = os.path.join(datadir_path, 'cached_payout_address')
85             
86             if os.path.exists(address_path):
87                 with open(address_path, 'rb') as f:
88                     address = f.read().strip('\r\n')
89                 print '    Loaded cached address: %s...' % (address,)
90             else:
91                 address = None
92             
93             if address is not None:
94                 res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
95                 if not res['isvalid'] or not res['ismine']:
96                     print '    Cached address is either invalid or not controlled by local bitcoind!'
97                     address = None
98             
99             if address is None:
100                 print '    Getting payout address from bitcoind...'
101                 address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
102             
103             with open(address_path, 'wb') as f:
104                 f.write(address)
105             
106             my_script = bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net.PARENT))
107         else:
108             print '    ...Computing payout script from provided address...'
109             my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
110         print '    ...success! Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
111         print
112         
113         my_share_hashes = set()
114         my_doa_share_hashes = set()
115         
116         tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
117         shared_share_hashes = set()
118         ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
119         known_verified = set()
120         recent_blocks = []
121         print "Loading shares..."
122         for i, (mode, contents) in enumerate(ss.get_shares()):
123             if mode == 'share':
124                 if contents.hash in tracker.shares:
125                     continue
126                 shared_share_hashes.add(contents.hash)
127                 contents.time_seen = 0
128                 tracker.add(contents)
129                 if len(tracker.shares) % 1000 == 0 and tracker.shares:
130                     print "    %i" % (len(tracker.shares),)
131             elif mode == 'verified_hash':
132                 known_verified.add(contents)
133             else:
134                 raise AssertionError()
135         print "    ...inserting %i verified shares..." % (len(known_verified),)
136         for h in known_verified:
137             if h not in tracker.shares:
138                 ss.forget_verified_share(h)
139                 continue
140             tracker.verified.add(tracker.shares[h])
141         print "    ...done loading %i shares!" % (len(tracker.shares),)
142         print
143         tracker.removed.watch(lambda share: ss.forget_share(share.hash))
144         tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
145         tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
146         
147         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
148         
149         pre_current_work = variable.Variable(None)
150         pre_merged_work = variable.Variable({})
151         # information affecting work that should trigger a long-polling update
152         current_work = variable.Variable(None)
153         # information affecting work that should not trigger a long-polling update
154         current_work2 = variable.Variable(None)
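        # A sketch of the Variable pattern used here (hypothetical callback):
        #     current_work.changed.watch(lambda work: wake_long_polls(work))
        # Every set() on current_work fires its watchers, which is what drives
        # long-polling updates; timestamp/transaction churn lives in
        # current_work2 precisely so that it never wakes waiting miners.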
155         
156         requested = expiring_dict.ExpiringDict(300)
157         
158         @defer.inlineCallbacks
159         def set_real_work1():
160             work = yield getwork(bitcoind)
161             current_work2.set(dict(
162                 time=work['time'],
163                 transactions=work['transactions'],
164                 merkle_branch=work['merkle_branch'],
165                 subsidy=work['subsidy'],
166                 clock_offset=time.time() - work['time'],
167                 last_update=time.time(),
168             )) # set current_work2 first: work handlers triggered by pre_current_work's change end up reading it
169             pre_current_work.set(dict(
170                 version=work['version'],
171                 previous_block=work['previous_block_hash'],
172                 bits=work['bits'],
173                 coinbaseflags=work['coinbaseflags'],
174             ))
175         
176         if '\ngetblock ' in (yield bitcoind.rpc_help()):
177             height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
178             def get_height_rel_highest(block_hash):
179                 return height_cacher.call_now(block_hash, 0) - height_cacher.call_now(pre_current_work.value['previous_block'], 1000000000)
180         else:
181             get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory).get_height_rel_highest
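        # Either way, get_height_rel_highest returns a height relative to the
        # current best block: 0 for the tip, negative for ancestors. The
        # defaults passed to call_now make not-yet-cached hashes look very
        # deep until the getblock RPC result arrives.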
182         
183         def set_real_work2():
184             best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
185             
186             t = dict(pre_current_work.value)
187             t['best_share_hash'] = best
188             t['mm_chains'] = pre_merged_work.value
189             current_work.set(t)
190             
191             t = time.time()
192             for peer2, share_hash in desired:
193                 if share_hash not in tracker.tails: # share was received while tracker.think was running
194                     continue
195                 last_request_time, count = requested.get(share_hash, (None, 0))
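                # exponential backoff: skip this share if it was last
                # requested less than 10 * 1.5**count seconds ago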
196                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
197                     continue
198                 potential_peers = set()
199                 for head in tracker.tails[share_hash]:
200                     potential_peers.update(peer_heads.get(head, set()))
201                 potential_peers = [peer for peer in potential_peers if peer.connected2]
202                 if count == 0 and peer2 is not None and peer2.connected2:
203                     peer = peer2
204                 else:
205                     peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
206                     if peer is None:
207                         continue
208                 
209                 print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
210                 peer.send_getshares(
211                     hashes=[share_hash],
212                     parents=2000,
213                     stops=list(set(tracker.heads) | set(
214                         tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
215                     ))[:100],
216                 )
217                 requested[share_hash] = t, count + 1
218         pre_current_work.changed.watch(lambda _: set_real_work2())
219         
220         print 'Initializing work...'
221         yield set_real_work1()
222         print '    ...success!'
223         print
224         
225         pre_merged_work.changed.watch(lambda _: set_real_work2())
226         
227         
228         @defer.inlineCallbacks
229         def set_merged_work(merged_url, merged_userpass):
230             merged_proxy = jsonrpc.Proxy(merged_url, (merged_userpass,))
231             while True:
232                 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
233                 pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
234                     hash=int(auxblock['hash'], 16),
235                     target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
236                     merged_proxy=merged_proxy,
237                 )}))
238                 yield deferral.sleep(1)
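        # One polling loop per --merged URL; each pass replaces that chain's
        # entry (keyed by 'chainid') in pre_merged_work, which triggers
        # set_real_work2 through the watcher installed above.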
239         for merged_url, merged_userpass in merged_urls:
240             set_merged_work(merged_url, merged_userpass)
241         
242         @pre_merged_work.changed.watch
243         def _(new_merged_work):
244             print 'Got new merged mining work!'
245         
246         # setup p2p logic and join p2pool network
247         
248         class Node(p2p.Node):
249             def handle_shares(self, shares, peer):
250                 if len(shares) > 5:
251                     print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
252                 
253                 new_count = 0
254                 for share in shares:
255                     if share.hash in tracker.shares:
256                         #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
257                         continue
258                     
259                     new_count += 1
260                     
261                     #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
262                     
263                     tracker.add(share)
264                 
265                 if shares and peer is not None:
266                     peer_heads.setdefault(shares[0].hash, set()).add(peer)
267                 
268                 if new_count:
269                     set_real_work2()
270                 
271                 if len(shares) > 5:
272                     print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
273             
274             def handle_share_hashes(self, hashes, peer):
275                 t = time.time()
276                 get_hashes = []
277                 for share_hash in hashes:
278                     if share_hash in tracker.shares:
279                         continue
280                     last_request_time, count = requested.get(share_hash, (None, 0))
281                     if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
282                         continue
283                     print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
284                     get_hashes.append(share_hash)
285                     requested[share_hash] = t, count + 1
286                 
287                 if hashes and peer is not None:
288                     peer_heads.setdefault(hashes[0], set()).add(peer)
289                 if get_hashes:
290                     peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
291             
292             def handle_get_shares(self, hashes, parents, stops, peer):
293                 parents = min(parents, 1000//len(hashes)) if hashes else 0 # guard against an empty request crashing the division
294                 stops = set(stops)
295                 shares = []
296                 for share_hash in hashes:
297                     for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
298                         if share.hash in stops:
299                             break
300                         shares.append(share)
301                 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
302                 peer.sendShares(shares)
303         
304         @tracker.verified.added.watch
305         def _(share):
306             if share.pow_hash <= share.header['bits'].target:
307                 if factory.conn.value is not None:
308                     factory.conn.value.send_block(block=share.as_block(tracker))
309                 else:
310                     print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
311                 print
312                 print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash)
313                 print
314                 recent_blocks.append({ 'ts': share.timestamp, 'hash': '%x' % (share.header_hash) })
315         
316         print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
317         
318         @defer.inlineCallbacks
319         def parse(x):
320             if ':' in x:
321                 ip, port = x.split(':')
322                 defer.returnValue(((yield reactor.resolve(ip)), int(port)))
323             else:
324                 defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
325         
326         addrs = {}
327         if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
328             try:
329                 addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
330             except:
331                 print >>sys.stderr, "error reading addrs"
332         for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
333             try:
334                 addr = yield addr_df
335                 if addr not in addrs:
336                     addrs[addr] = (0, time.time(), time.time())
337             except:
338                 log.err()
339         
340         connect_addrs = set()
341         for addr_df in map(parse, args.p2pool_nodes):
342             try:
343                 connect_addrs.add((yield addr_df))
344             except:
345                 log.err()
346         
347         p2p_node = Node(
348             best_share_hash_func=lambda: current_work.value['best_share_hash'],
349             port=args.p2pool_port,
350             net=net,
351             addr_store=addrs,
352             connect_addrs=connect_addrs,
353         )
354         p2p_node.start()
355         
356         def save_addrs():
357             open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
358         task.LoopingCall(save_addrs).start(60)
359         
360         # send share when the chain changes to their chain
361         def work_changed(new_work):
362             #print 'Work changed:', new_work
363             shares = []
364             for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
365                 if share.hash in shared_share_hashes:
366                     break
367                 shared_share_hashes.add(share.hash)
368                 shares.append(share)
369             
370             for peer in p2p_node.peers.itervalues():
371                 peer.sendShares([share for share in shares if share.peer is not peer])
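        # Only shares not already in shared_share_hashes are flooded, and a
        # share is never echoed back to the peer it arrived from.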
372         
373         current_work.changed.watch(work_changed)
374         
375         def save_shares():
376             for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
377                 ss.add_share(share)
378                 if share.hash in tracker.verified.shares:
379                     ss.add_verified_hash(share.hash)
380         task.LoopingCall(save_shares).start(60)
381         
382         print '    ...success!'
383         print
384         
385         start_time = time.time()
386         
387         @defer.inlineCallbacks
388         def upnp_thread():
389             while True:
390                 try:
391                     is_lan, lan_ip = yield ipdiscover.get_local_ip()
392                     if is_lan:
393                         pm = yield portmapper.get_port_mapper()
394                         yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
395                 except defer.TimeoutError:
396                     pass
397                 except:
398                     if p2pool.DEBUG:
399                         log.err(None, "UPnP error:")
400                 yield deferral.sleep(random.expovariate(1/120))
401         
402         if args.upnp:
403             upnp_thread()
404         
405         # start listening for workers with a JSON-RPC server
406         
407         print 'Listening for workers on port %i...' % (args.worker_port,)
408         
409         if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
410             with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
411                 vip_pass = f.read().strip('\r\n')
412         else:
413             vip_pass = '%016x' % (random.randrange(2**64),)
414             with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
415                 f.write(vip_pass)
416         print '    Worker password:', vip_pass, '(only required for generating graphs)'
417         
418         # setup worker logic
419         
420         removed_unstales_var = variable.Variable((0, 0, 0))
421         @tracker.verified.removed.watch
422         def _(share):
423             if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
424                 assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
425                 removed_unstales_var.set((
426                     removed_unstales_var.value[0] + 1,
427                     removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
428                     removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
429                 ))
430         
431         removed_doa_unstales_var = variable.Variable(0)
432         @tracker.verified.removed.watch
433         def _(share):
434             if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
435                 removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
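        # stale_info codes: 0 = not stale, 253 = orphan, 254 = dead on
        # arrival. These removed-watchers remember our stale shares that get
        # pruned out of the tracker, so get_stale_counts below stays accurate
        # beyond the tracker's retention window.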
436         
437         def get_stale_counts():
438             '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
439             my_shares = len(my_share_hashes)
440             my_doa_shares = len(my_doa_share_hashes)
441             delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
442             my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
443             my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
444             orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
445             doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
446             
447             my_shares_not_in_chain = my_shares - my_shares_in_chain
448             my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
449             
450             return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
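        # Worked example (hypothetical numbers): 100 local shares, 90 of them
        # in the chain (6 flagged orphan, 4 flagged dead there), 7 local DOA
        # shares of which 4 made it into the chain:
        #     get_stale_counts() == ((10 - 3, 3), 100, (6, 4))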
451         
452         
453         recent_shares_ts_work2 = []
454         
455         class WorkerBridge(worker_interface.WorkerBridge):
456             def __init__(self):
457                 worker_interface.WorkerBridge.__init__(self)
458                 self.new_work_event = current_work.changed
459                 self.recent_shares_ts_work = []
460             
461             def _get_payout_script_from_username(self, user):
462                 if user is None:
463                     return None
464                 try:
465                     pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
466                 except: # XXX blah
467                     return None
468                 return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
469             
470             def preprocess_request(self, request):
471                 payout_script = self._get_payout_script_from_username(request.getUser())
472                 if payout_script is None or random.uniform(0, 100) < args.worker_fee:
473                     payout_script = my_script
474                 return payout_script,
475             
476             def get_work(self, payout_script):
477                 if len(p2p_node.peers) == 0 and net.PERSIST:
478                     raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
479                 if current_work.value['best_share_hash'] is None and net.PERSIST:
480                     raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
481                 if time.time() > current_work2.value['last_update'] + 60:
482                     raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
483                 
484                 if current_work.value['mm_chains']:
485                     tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
486                     mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
487                     mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
488                         merkle_root=bitcoin_data.merkle_hash(mm_hashes),
489                         size=size,
490                         nonce=0,
491                     ))
492                     mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
493                 else:
494                     mm_data = ''
495                     mm_later = []
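                # mm_data ('\xfa\xbemm' magic plus the packed aux tree root)
                # is embedded in the coinbase below; got_response later proves
                # each aux chain's inclusion using a merkle branch over
                # mm_hashes at that chain's slot in the tree.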
496                 
497                 share_info, generate_tx = p2pool_data.generate_transaction(
498                     tracker=tracker,
499                     share_data=dict(
500                         previous_share_hash=current_work.value['best_share_hash'],
501                         coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
502                         nonce=struct.pack('<Q', random.randrange(2**64)),
503                         new_script=payout_script,
504                         subsidy=current_work2.value['subsidy'],
505                         donation=math.perfect_round(65535*args.donation_percentage/100),
506                         stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
507                             253 if orphans > orphans_recorded_in_chain else
508                             254 if doas > doas_recorded_in_chain else
509                             0
510                         )(*get_stale_counts()),
511                     ),
512                     block_target=current_work.value['bits'].target,
513                     desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
514                     net=net,
515                 )
516                 
517                 target = 2**256//2**32 - 1
518                 if len(self.recent_shares_ts_work) == 50:
519                     hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
520                     target = min(target, 2**256//(hash_rate * 5))
521                 target = max(target, share_info['bits'].target)
522                 for aux_work in current_work.value['mm_chains'].itervalues():
523                     target = max(target, aux_work['target'])
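                # Pseudoshare target: start at difficulty 1, tighten it so
                # this miner returns roughly one proof of work every 5
                # seconds, but never make it harder than the real share
                # target or any aux chain's target, so nothing valid is
                # filtered out. (Bigger target == easier.)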
524                 
525                 transactions = [generate_tx] + list(current_work2.value['transactions'])
526                 merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(generate_tx)), 0, current_work2.value['merkle_branch'])
527                 
528                 getwork_time = time.time()
529                 merkle_branch = current_work2.value['merkle_branch']
530                 
531                 print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
532                     bitcoin_data.target_to_difficulty(target),
533                     bitcoin_data.target_to_difficulty(share_info['bits'].target),
534                     current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
535                     len(current_work2.value['transactions']),
536                 )
537                 
538                 ba = bitcoin_getwork.BlockAttempt(
539                     version=current_work.value['version'],
540                     previous_block=current_work.value['previous_block'],
541                     merkle_root=merkle_root,
542                     timestamp=current_work2.value['time'],
543                     bits=current_work.value['bits'],
544                     share_target=target,
545                 )
546                 
547                 def got_response(header, request):
548                     assert header['merkle_root'] == merkle_root
549                     
550                     pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
551                     on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
552                     
553                     try:
554                         if pow_hash <= header['bits'].target or p2pool.DEBUG:
555                             if factory.conn.value is not None:
556                                 factory.conn.value.send_block(block=dict(header=header, txs=transactions))
557                             else:
558                                 print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
559                             if pow_hash <= header['bits'].target:
560                                 print
561                                 print 'GOT BLOCK FROM MINER! Passing to bitcoind! bitcoin: %x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),)
562                                 print
563                                 recent_blocks.append({ 'ts': time.time(), 'hash': '%x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),) })
564                     except:
565                         log.err(None, 'Error while processing potential block:')
566                     
567                     for aux_work, index, hashes in mm_later:
568                         try:
569                             if pow_hash <= aux_work['target'] or p2pool.DEBUG:
570                                 df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
571                                     pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
572                                     bitcoin_data.aux_pow_type.pack(dict(
573                                         merkle_tx=dict(
574                                             tx=transactions[0],
575                                             block_hash=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),
576                                             merkle_branch=merkle_branch,
577                                             index=0,
578                                         ),
579                                         merkle_branch=bitcoin_data.calculate_merkle_branch(hashes, index),
580                                         index=index,
581                                         parent_block_header=header,
582                                     )).encode('hex'),
583                                 )
584                                 @df.addCallback
585                                 def _(result):
586                                     if result != (pow_hash <= aux_work['target']):
587                                         print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
588                                     else:
589                                         print 'Merged block submittal result: %s' % (result,)
590                                 @df.addErrback
591                                 def _(err):
592                                     log.err(err, 'Error submitting merged block:')
593                         except:
594                             log.err(None, 'Error while processing merged mining POW:')
595                     
596                     if pow_hash <= share_info['bits'].target:
597                         share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
598                         print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
599                             request.getUser(),
600                             p2pool_data.format_hash(share.hash),
601                             p2pool_data.format_hash(share.previous_hash),
602                             time.time() - getwork_time,
603                             ' DEAD ON ARRIVAL' if not on_time else '',
604                         )
605                         my_share_hashes.add(share.hash)
606                         if not on_time:
607                             my_doa_share_hashes.add(share.hash)
608                         
609                         tracker.add(share)
610                         if not p2pool.DEBUG:
611                             tracker.verified.add(share)
612                         set_real_work2()
613                         
614                         try:
615                             if pow_hash <= header['bits'].target or p2pool.DEBUG:
616                                 for peer in p2p_node.peers.itervalues():
617                                     peer.sendShares([share])
618                                 shared_share_hashes.add(share.hash)
619                         except:
620                             log.err(None, 'Error forwarding block solution:')
621                     
622                     if pow_hash <= target:
623                         reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
624                         if request.getPassword() == vip_pass:
625                             reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
626                         self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
627                         while len(self.recent_shares_ts_work) > 50:
628                             self.recent_shares_ts_work.pop(0)
629                         recent_shares_ts_work2.append((time.time(), bitcoin_data.target_to_average_attempts(target), not on_time, request.getUser()))
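                        # recent_shares_ts_work2 is the shared pseudoshare log
                        # of (timestamp, attempts, dead, user) tuples;
                        # status_thread trims it to the last 600 seconds and
                        # get_local_stats aggregates it into the per-user
                        # miner_hash_rates/miner_dead_hash_rates fields.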
630                     
631                     
632                     if pow_hash > target:
633                         print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
634                         print '    Hash:   %56x' % (pow_hash,)
635                         print '    Target: %56x' % (target,)
636                     
637                     return on_time
638                 
639                 return ba, got_response
640         
641         web_root = resource.Resource()
642         worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
643         
644         def get_rate():
645             if tracker.get_height(current_work.value['best_share_hash']) < 720:
646                 return json.dumps(None)
647             return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
648                 / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
649         
650         def get_users():
651             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
652             weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
653             res = {}
654             for script in sorted(weights, key=lambda s: weights[s]):
655                 res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
656             return json.dumps(res)
657         
658         def get_current_txouts():
659             share = tracker.shares[current_work.value['best_share_hash']]
660             share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
661             return dict((out['script'], out['value']) for out in gentx['tx_outs'])
662         
663         def get_current_scaled_txouts(scale, trunc=0):
664             txouts = get_current_txouts()
665             total = sum(txouts.itervalues())
666             results = dict((script, value*scale//total) for script, value in txouts.iteritems())
667             if trunc > 0:
668                 total_random = 0
669                 random_set = set()
670                 for s in sorted(results, key=results.__getitem__):
671                     if results[s] >= trunc:
672                         break
673                     total_random += results[s]
674                     random_set.add(s)
675                 if total_random:
676                     winner = math.weighted_choice((script, results[script]) for script in random_set)
677                     for script in random_set:
678                         del results[script]
679                     results[winner] = total_random
680             if sum(results.itervalues()) < int(scale):
681                 results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
682             return results
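        # Payouts below `trunc` aren't dropped: their combined value is given
        # to one of them, chosen with probability proportional to size, so
        # each script's expected payout is unchanged while the sendmany list
        # stays short.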
683         
684         def get_current_payouts():
685             return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
686         
687         def get_patron_sendmany(this):
688             try:
689                 if '/' in this:
690                     this, trunc = this.split('/', 1)
691                 else:
692                     trunc = '0.01'
693                 return json.dumps(dict(
694                     (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
695                     for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
696                     if bitcoin_data.script2_to_address(script, net.PARENT) is not None
697                 ))
698             except:
699                 return json.dumps(None)
700         
701         def get_global_stats():
702             # averaged over last hour
703             lookbehind = 3600//net.SHARE_PERIOD
704             if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
705                 return None
706             
707             nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
708             stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
709             return json.dumps(dict(
710                 pool_nonstale_hash_rate=nonstale_hash_rate,
711                 pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
712                 pool_stale_prop=stale_prop,
713             ))
714         
715         def get_local_stats():
716             lookbehind = 3600//net.SHARE_PERIOD
717             if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
718                 return None
719             
720             global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
721             
722             my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
723             my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
724             my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
725             my_share_count = my_unstale_count + my_orphan_count + my_doa_count
726             my_stale_count = my_orphan_count + my_doa_count
727             
728             my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
729             
730             my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
731                 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
732                 if share.hash in my_share_hashes)
733             actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
734                 tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
735             share_att_s = my_work / actual_time
736             
737             miner_hash_rates = {}
738             miner_dead_hash_rates = {}
739             for ts, work, dead, user in recent_shares_ts_work2:
740                 miner_hash_rates[user] = miner_hash_rates.get(user, 0) + work/600
741                 if dead:
742                     miner_dead_hash_rates[user] = miner_dead_hash_rates.get(user, 0) + work/600
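            # work/600 treats the log as covering a full 600-second window
            # (status_thread trims it to that length), yielding average
            # attempts per second for each user.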
743             
744             return json.dumps(dict(
745                 my_hash_rates_in_last_hour=dict(
746                     note="DEPRECATED",
747                     nonstale=share_att_s,
748                     rewarded=share_att_s/(1 - global_stale_prop),
749                     actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
750                 ),
751                 my_share_counts_in_last_hour=dict(
752                     shares=my_share_count,
753                     unstale_shares=my_unstale_count,
754                     stale_shares=my_stale_count,
755                     orphan_stale_shares=my_orphan_count,
756                     doa_stale_shares=my_doa_count,
757                 ),
758                 my_stale_proportions_in_last_hour=dict(
759                     stale=my_stale_prop,
760                     orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
761                     dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
762                 ),
763                 miner_hash_rates=miner_hash_rates,
764                 miner_dead_hash_rates=miner_dead_hash_rates,
765             ))
766         
767         def get_peer_addresses():
768             return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
769         
770         def get_uptime():
771             return json.dumps(time.time() - start_time)
772         
773         class WebInterface(resource.Resource):
774             def __init__(self, func, mime_type, *fields):
                    resource.Resource.__init__(self) # initialize the base Resource (sets up its children dict)
775                 self.func, self.mime_type, self.fields = func, mime_type, fields
776             
777             def render_GET(self, request):
778                 request.setHeader('Content-Type', self.mime_type)
779                 request.setHeader('Access-Control-Allow-Origin', '*')
780                 return self.func(*(request.args[field][0] for field in self.fields))
781         
782         web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
783         web_root.putChild('users', WebInterface(get_users, 'application/json'))
784         web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
785         web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
786         web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
787         web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
788         web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
789         web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
790         web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
791         web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
792         web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
793         if draw is not None:
794             web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
795         
796         grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
797         web_root.putChild('graphs', grapher.get_resource())
798         def add_point():
799             if tracker.get_height(current_work.value['best_share_hash']) < 720:
800                 return
801             nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
802             poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
803             grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
804         task.LoopingCall(add_point).start(100)
805         
806         def attempt_listen():
807             try:
808                 reactor.listenTCP(args.worker_port, server.Site(web_root))
809             except error.CannotListenError, e:
810                 print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
811                 reactor.callLater(1, attempt_listen)
812             else:
813                 with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
814                     pass
815         attempt_listen()
816         
817         print '    ...success!'
818         print
819         
820         
821         @defer.inlineCallbacks
822         def work_poller():
823             while True:
824                 flag = factory.new_block.get_deferred()
825                 try:
826                     yield set_real_work1()
827                 except:
828                     log.err()
829                 yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
830         work_poller()
831         
832         
833         # done!
834         print 'Started successfully!'
835         print
836         
837         
838         if hasattr(signal, 'SIGALRM'):
839             signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
840                 sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
841             ))
842             signal.siginterrupt(signal.SIGALRM, False)
843             task.LoopingCall(signal.alarm, 30).start(1)
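        # Watchdog: the alarm is re-armed to 30s once a second, so SIGALRM
        # only fires if the reactor stalls for 30 seconds; the handler then
        # dumps a stack trace instead of killing the process.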
844         
845         if args.irc_announce:
846             from twisted.words.protocols import irc
847             class IRCClient(irc.IRCClient):
848                 nickname = 'p2pool'
849                 def lineReceived(self, line):
850                     print repr(line)
851                     irc.IRCClient.lineReceived(self, line)
852                 def signedOn(self):
853                     irc.IRCClient.signedOn(self)
854                     self.factory.resetDelay()
855                     self.join('#p2pool')
856                     self.watch_id = tracker.verified.added.watch(self._new_share)
857                     self.announced_hashes = set()
858                 def _new_share(self, share):
859                     if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes:
860                         self.announced_hashes.add(share.header_hash)
861                         self.say('#p2pool', '\x02BLOCK FOUND by %s! http://blockexplorer.com/block/%064x' % (bitcoin_data.script2_to_address(share.share_data['new_script'], net.PARENT), share.header_hash))
862                 def connectionLost(self, reason):
863                     tracker.verified.added.unwatch(self.watch_id)
864                     print 'IRC connection lost:', reason.getErrorMessage()
865             class IRCClientFactory(protocol.ReconnectingClientFactory):
866                 protocol = IRCClient
867             reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
868         
869         @defer.inlineCallbacks
870         def status_thread():
871             average_period = 600
872             first_pseudoshare_time = None
873             
874             last_str = None
875             last_time = 0
876             while True:
877                 yield deferral.sleep(3)
878                 try:
879                     if time.time() > current_work2.value['last_update'] + 60:
880                         print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - current_work2.value['last_update']),)
881                     
882                     height = tracker.get_height(current_work.value['best_share_hash'])
883                     this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
884                         height,
885                         len(tracker.verified.shares),
886                         len(tracker.shares),
887                         len(p2p_node.peers),
888                         sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
889                     ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
890                     
891                     if first_pseudoshare_time is None and recent_shares_ts_work2:
892                         first_pseudoshare_time = recent_shares_ts_work2[0][0]
893                     while recent_shares_ts_work2 and recent_shares_ts_work2[0][0] < time.time() - average_period:
894                         recent_shares_ts_work2.pop(0)
895                     my_att_s = sum(work for ts, work, dead, user in recent_shares_ts_work2)/min(time.time() - first_pseudoshare_time, average_period) if first_pseudoshare_time is not None else 0
896                     this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
897                         math.format(int(my_att_s)),
898                         math.format_dt(min(time.time() - first_pseudoshare_time, average_period) if first_pseudoshare_time is not None else 0),
899                         math.format_binomial_conf(sum(1 for ts, work, dead, user in recent_shares_ts_work2 if dead), len(recent_shares_ts_work2), 0.95),
900                         math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].target / my_att_s) if my_att_s else '???',
901                     )
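                    # expected time to share = 2**256 / share_target /
                    # local attempts per second, i.e. the average number of
                    # attempts a share takes divided by our hash rate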
902                     
903                     if height > 2:
904                         (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
905                         stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
906                         real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
907                         
908                         this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
909                             shares, stale_orphan_shares, stale_doa_shares,
910                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
911                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
912                             get_current_txouts().get(my_script, 0)*1e-8, net.PARENT.SYMBOL,
913                         )
914                         this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
915                             math.format(int(real_att_s)),
916                             100*stale_prop,
917                             math.format_dt(2**256 / current_work.value['bits'].target / real_att_s),
918                         )
919                     
920                     if this_str != last_str or time.time() > last_time + 15:
921                         print this_str
922                         last_str = this_str
923                         last_time = time.time()
924                 except:
925                     log.err()
926         status_thread()
927     except:
928         log.err(None, 'Fatal error:')
929         reactor.stop()
930
931 def run():
932     class FixedArgumentParser(argparse.ArgumentParser):
933         def _read_args_from_files(self, arg_strings):
934             # expand arguments referencing files
935             new_arg_strings = []
936             for arg_string in arg_strings:
937                 
938                 # for regular arguments, just add them back into the list
939                 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
940                     new_arg_strings.append(arg_string)
941                 
942                 # replace arguments referencing files with the file content
943                 else:
944                     try:
945                         args_file = open(arg_string[1:])
946                         try:
947                             arg_strings = []
948                             for arg_line in args_file.read().splitlines():
949                                 for arg in self.convert_arg_line_to_args(arg_line):
950                                     arg_strings.append(arg)
951                             arg_strings = self._read_args_from_files(arg_strings)
952                             new_arg_strings.extend(arg_strings)
953                         finally:
954                             args_file.close()
955                     except IOError:
956                         err = sys.exc_info()[1]
957                         self.error(str(err))
958             
959             # return the modified argument list
960             return new_arg_strings
961         
962         def convert_arg_line_to_args(self, arg_line):
963             return [arg for arg in arg_line.split() if arg.strip()]
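    # With fromfile_prefix_chars='@', `run_p2pool.py @p2pool.conf` reads
    # extra arguments from p2pool.conf; unlike stock argparse, each line is
    # split on whitespace, so one line may hold several options.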
964     
965     
966     realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
967     
968     parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
969     parser.add_argument('--version', action='version', version=p2pool.__version__)
970     parser.add_argument('--net',
971         help='use specified network (default: bitcoin)',
972         action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
973     parser.add_argument('--testnet',
974         help='''use the network's testnet''',
975         action='store_const', const=True, default=False, dest='testnet')
976     parser.add_argument('--debug',
977         help='enable debugging mode',
978         action='store_const', const=True, default=False, dest='debug')
979     parser.add_argument('-a', '--address',
980         help='generate payouts to this address (default: <address requested from bitcoind>)',
981         type=str, action='store', default=None, dest='address')
982     parser.add_argument('--datadir',
983         help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
984         type=str, action='store', default=None, dest='datadir')
985     parser.add_argument('--logfile',
986         help='''log to this file (default: data/<NET>/log)''',
987         type=str, action='store', default=None, dest='logfile')
988     parser.add_argument('--merged',
989         help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
990         type=str, action='append', default=[], dest='merged_urls')
991     parser.add_argument('--merged-url',
992         help='DEPRECATED, use --merged',
993         type=str, action='store', default=None, dest='merged_url')
994     parser.add_argument('--merged-userpass',
995         help='DEPRECATED, use --merged',
996         type=str, action='store', default=None, dest='merged_userpass')
997     parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
998         help='donate this percentage of work to author of p2pool (default: 0.5)',
999         type=float, action='store', default=0.5, dest='donation_percentage')
1000     parser.add_argument('--irc-announce',
1001         help='announce any blocks found on irc://irc.freenode.net/#p2pool',
1002         action='store_true', default=False, dest='irc_announce')
1003     
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to built-in addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners (default: %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (providing only one causes the username to default to empty; providing neither makes P2Pool read them from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
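    # resolve the network; --testnet selects e.g. 'bitcoin_testnet' instead of 'bitcoin'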
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    
    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
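    # accept at most two positional values; padding with Nones means zero, one,
    # or two values map cleanly onto (username, password), a lone value being
    # taken as the password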
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments is allowed')
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
    
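    # no RPC password on the command line: read rpcuser/rpcpassword (and any
    # unset ports) from the network's bitcoin.conf, prepending a dummy [x]
    # section header so ConfigParser accepts the sectionless file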
    if args.bitcoind_rpc_password is None:
        if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
            parser.error('This network has no configuration file function. Manually enter your RPC password.')
        conf_path = net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you haven't created a configuration file yet, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
        with open(conf_path, 'rb') as f:
            cp = ConfigParser.RawConfigParser()
            cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
            for conf_name, var_name, var_type in [
                ('rpcuser', 'bitcoind_rpc_username', str),
                ('rpcpassword', 'bitcoind_rpc_password', str),
                ('rpcport', 'bitcoind_rpc_port', int),
                ('port', 'bitcoind_p2p_port', int),
            ]:
                if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                    setattr(args, var_name, var_type(cp.get('x', conf_name)))
    
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
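    # split 'http://user:pass@host:port/' into a bare URL plus the 'user:pass'
    # credentials, which are passed to the JSON-RPC proxy separately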
    def separate_url(url):
        s = urlparse.urlsplit(url)
        if '@' not in s.netloc:
            parser.error('merged url netloc must contain an "@"')
        userpass, new_netloc = s.netloc.rsplit('@', 1)
        return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
    merged_urls = map(separate_url, args.merged_urls)
    
    if args.merged_url is not None or args.merged_userpass is not None:
        print '--merged-url and --merged-userpass are deprecated! Use --merged http://USER:PASS@HOST:PORT/ instead!'
        print 'Pausing 10 seconds...'
        time.sleep(10)
        
        if args.merged_url is None or args.merged_userpass is None:
            parser.error('must specify both --merged-url and --merged-userpass')
        
        merged_urls = merged_urls + [(args.merged_url, args.merged_userpass)]
    
    
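    # route stdout/stderr through a timestamping tee that writes to both the console and the logfile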
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
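    # allow log-rotation tools to make us reopen the logfile (SIGUSR1 doesn't exist on Windows)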
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
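    # also reopen the logfile every 5 seconds, so rotation works even where SIGUSR1 isn't available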
    task.LoopingCall(logfile.reopen).start(5)
    
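    # hand control to Twisted: start main() once the reactor is up, then run the event loop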
    reactor.callWhenRunning(main, args, net, datadir_path, merged_urls)
    reactor.run()