from __future__ import division

import ConfigParser
import StringIO
import argparse
import os
import random
import struct
import sys
import time
import json
import signal
import traceback
import urlparse

from twisted.internet import defer, error, reactor, protocol, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
from . import p2p, networks, graphs
import p2pool, p2pool.data as p2pool_data

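# Fetch a block template from bitcoind via the getmemorypool RPC and normalize
# it: decode the transactions, precompute the merkle branch for the coinbase
# slot, and canonicalize the 'bits' and coinbase-flags fields.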
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    try:
        work = yield bitcoind.rpc_getmemorypool()
    except jsonrpc.Error, e:
        if e.code == -32601: # Method not found
            print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
            raise deferral.RetrySilentlyException()
        raise
    packed_transactions = [x.decode('hex') for x in work['transactions']]
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
        merkle_branch=bitcoin_data.calculate_merkle_branch([0] + map(bitcoin_data.hash256, packed_transactions), 0),
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
    ))

@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
        if not good:
            print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        print 'Determining payout script...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            
            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            
            with open(address_path, 'wb') as f:
                f.write(address)
            
            my_script = bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net.PARENT))
        else:
            print '    ...Computing payout script from provided address...'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success! Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
        print
        
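        # Hashes of shares generated by this node, and the subset that were
        # dead on arrival; both feed the stale-rate accounting below.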
        my_share_hashes = set()
        my_doa_share_hashes = set()
        
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        recent_blocks = []
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_merged_work = variable.Variable({})
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        requested = expiring_dict.ExpiringDict(300)
        
        print 'Initializing work...'
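        # Pull fresh work from bitcoind and split it across the two work
        # variables; watchers fire on pre_current_work, so current_work2 must
        # already hold the matching data by then.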
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                merkle_branch=work['merkle_branch'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # second set first because everything hooks on the first
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                bits=work['bits'],
                coinbaseflags=work['coinbaseflags'],
            ))
        yield set_real_work1()
        
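        # Prefer bitcoind's getblock RPC for measuring block heights; fall
        # back to tracking headers over the P2P connection if it is missing.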
        if '\ngetblock ' in (yield deferral.retry()(bitcoind.rpc_help)()):
            height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
            best_height_cached = variable.Variable((yield deferral.retry()(height_cacher)(pre_current_work.value['previous_block'])))
            def get_height_rel_highest(block_hash):
                this_height = height_cacher.call_now(block_hash, 0)
                best_height = height_cacher.call_now(pre_current_work.value['previous_block'], 0)
                best_height_cached.set(max(best_height_cached.value, this_height, best_height))
                return this_height - best_height_cached.value
        else:
            get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory).get_height_rel_highest
        
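        # Choose the best verified share to build on, publish it via
        # current_work, and request any missing parent shares from peers that
        # are known to have them.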
        def set_real_work2():
            best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['mm_chains'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        pre_current_work.changed.watch(lambda _: set_real_work2())
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        set_real_work2()
        print '    ...success!'
        print
        
        
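        # One polling loop per merged-mining URL, refreshing the aux work
        # about once a second.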
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.Proxy(merged_url, (merged_userpass,))
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
                pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # setup p2p logic and join p2pool network
        
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
                parents = min(parents, 1000//len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                peer.sendShares(shares)
        
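        # If a verified share also satisfies the full block target, submit it
        # to bitcoind as a block right away.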
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker))
                else:
                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                print
                recent_blocks.append(dict(ts=share.timestamp, hash='%064x' % (share.header_hash,)))
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
            try:
                addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
            except:
                print >>sys.stderr, "error reading addrs"
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        p2p_node = Node(
            best_share_hash_func=lambda: current_work.value['best_share_hash'],
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
        )
        p2p_node.start()
        
        def save_addrs():
            open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
        task.LoopingCall(save_addrs).start(60)
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        start_time = time.time()
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        
        if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
            with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
                vip_pass = f.read().strip('\r\n')
        else:
            vip_pass = '%016x' % (random.randrange(2**64),)
            with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
                f.write(vip_pass)
        print '    Worker password:', vip_pass, '(only required for generating graphs)'
        
        # setup worker logic
        
        removed_unstales_var = variable.Variable((0, 0, 0))
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
        
        removed_doa_unstales_var = variable.Variable(0)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)

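        # Stale accounting: combine the live chain's delta with the counters
        # above, which remember shares already expired out of the tracker.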
        def get_stale_counts():
            '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
            my_shares = len(my_share_hashes)
            my_doa_shares = len(my_doa_share_hashes)
            delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
            my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
            my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
            orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
            doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
            
            my_shares_not_in_chain = my_shares - my_shares_in_chain
            my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
            
            return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
        
        
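        # Tracks work submitted by local miners over the last ten minutes,
        # tagged with user and dead-on-arrival status.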
        local_rate_monitor = math.RateMonitor(10*60)
        
        class WorkerBridge(worker_interface.WorkerBridge):
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                self.recent_shares_ts_work = []
            
            def _get_payout_script_from_username(self, user):
                if user is None:
                    return None
                try:
                    pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                except: # not a valid address; fall back to the node's payout script
                    return None
                return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
            
            def preprocess_request(self, request):
                payout_script = self._get_payout_script_from_username(request.getUser())
                if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                    payout_script = my_script
                return payout_script,
            
            def get_work(self, payout_script):
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
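                # Build the merged-mining coinbase commitment: the '\xfa\xbemm'
                # magic followed by the merkle root of the aux chain hashes.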
                if current_work.value['mm_chains']:
                    tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
                    mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
                    mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                        merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                        size=size,
                        nonce=0,
                    ))
                    mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
                else:
                    mm_data = ''
                    mm_later = []
                
                share_info, generate_tx = p2pool_data.generate_transaction(
                    tracker=tracker,
                    share_data=dict(
                        previous_share_hash=current_work.value['best_share_hash'],
                        coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
                        nonce=struct.pack('<Q', random.randrange(2**64)),
                        new_script=payout_script,
                        subsidy=current_work2.value['subsidy'],
                        donation=math.perfect_round(65535*args.donation_percentage/100),
                        stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                            253 if orphans > orphans_recorded_in_chain else
                            254 if doas > doas_recorded_in_chain else
                            0
                        )(*get_stale_counts()),
                    ),
                    block_target=current_work.value['bits'].target,
                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                    net=net,
                )
                
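                # Pick a pseudoshare target from the miner's recent rate
                # (about one result every five seconds), but never harder than
                # the actual share target or any aux-work target.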
                target = net.PARENT.SANE_MAX_TARGET
                if len(self.recent_shares_ts_work) == 50:
                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                    target = min(target, 2**256//(hash_rate * 5))
                target = max(target, share_info['bits'].target)
                for aux_work in current_work.value['mm_chains'].itervalues():
                    target = max(target, aux_work['target'])
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
                merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(generate_tx)), 0, current_work2.value['merkle_branch'])
                
                getwork_time = time.time()
                merkle_branch = current_work2.value['merkle_branch']
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                ba = bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
                
                received_header_hashes = set()
                
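                # Handle a solved header from a miner: submit it as a bitcoin
                # and/or merged-mining block if it qualifies, add it as a share
                # if it meets the share target, and record the pseudoshare in
                # the local rate statistics.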
                def got_response(header, request):
                    assert header['merkle_root'] == merkle_root
                    
                    header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
                    pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
                    on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                    
                    try:
                        if pow_hash <= header['bits'].target or p2pool.DEBUG:
                            @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
                            def submit_block():
                                if factory.conn.value is None:
                                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                                    raise deferral.RetrySilentlyException()
                                factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                            submit_block()
                            if pow_hash <= header['bits'].target:
                                print
                                print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                                print
                                recent_blocks.append(dict(ts=time.time(), hash='%064x' % (header_hash,)))
                    except:
                        log.err(None, 'Error while processing potential block:')
                    
                    for aux_work, index, hashes in mm_later:
                        try:
                            if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                                df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                    pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                                    bitcoin_data.aux_pow_type.pack(dict(
                                        merkle_tx=dict(
                                            tx=transactions[0],
                                            block_hash=header_hash,
                                            merkle_branch=merkle_branch,
                                            index=0,
                                        ),
                                        merkle_branch=bitcoin_data.calculate_merkle_branch(hashes, index),
                                        index=index,
                                        parent_block_header=header,
                                    )).encode('hex'),
                                )
                                @df.addCallback
                                def _(result):
                                    if result != (pow_hash <= aux_work['target']):
                                        print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                                    else:
                                        print 'Merged block submittal result: %s' % (result,)
                                @df.addErrback
                                def _(err):
                                    log.err(err, 'Error submitting merged block:')
                        except:
                            log.err(None, 'Error while processing merged mining POW:')
                    
                    if pow_hash <= share_info['bits'].target:
                        share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                        print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                            request.getUser(),
                            p2pool_data.format_hash(share.hash),
                            p2pool_data.format_hash(share.previous_hash),
                            time.time() - getwork_time,
                            ' DEAD ON ARRIVAL' if not on_time else '',
                        )
                        my_share_hashes.add(share.hash)
                        if not on_time:
                            my_doa_share_hashes.add(share.hash)
                        
                        tracker.add(share)
                        if not p2pool.DEBUG:
                            tracker.verified.add(share)
                        set_real_work2()
                        
                        try:
                            if pow_hash <= header['bits'].target or p2pool.DEBUG:
                                for peer in p2p_node.peers.itervalues():
                                    peer.sendShares([share])
                                shared_share_hashes.add(share.hash)
                        except:
                            log.err(None, 'Error forwarding block solution:')
                    
                    if pow_hash <= target and header_hash not in received_header_hashes:
                        reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
                        if request.getPassword() == vip_pass:
                            reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
                        self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                        while len(self.recent_shares_ts_work) > 50:
                            self.recent_shares_ts_work.pop(0)
                        local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
                    
                    if header_hash in received_header_hashes:
                        print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
                    received_header_hashes.add(header_hash)
                    
                    if pow_hash > target:
                        print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
                        print '    Hash:   %56x' % (pow_hash,)
                        print '    Target: %56x' % (target,)
                    
                    return on_time
                
                return ba, got_response
        
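        # HTTP interface: the worker RPC endpoint plus JSON status pages.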
        web_root = resource.Resource()
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
        
        def get_rate():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return json.dumps(None)
            return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
            return json.dumps(res)
        
        def get_current_txouts():
            share = tracker.shares[current_work.value['best_share_hash']]
            share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
            return dict((out['script'], out['value']) for out in gentx['tx_outs'])
        
        def get_current_scaled_txouts(scale, trunc=0):
            txouts = get_current_txouts()
            total = sum(txouts.itervalues())
            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
            if trunc > 0:
                total_random = 0
                random_set = set()
                for s in sorted(results, key=results.__getitem__):
                    if results[s] >= trunc:
                        break
                    total_random += results[s]
                    random_set.add(s)
                if total_random:
                    winner = math.weighted_choice((script, results[script]) for script in random_set)
                    for script in random_set:
                        del results[script]
                    results[winner] = total_random
            if sum(results.itervalues()) < int(scale):
                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
            return results
        
        def get_current_payouts():
            return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
        
        def get_patron_sendmany(this):
            try:
                if '/' in this:
                    this, trunc = this.split('/', 1)
                else:
                    trunc = '0.01'
                return json.dumps(dict(
                    (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
                    for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
                    if bitcoin_data.script2_to_address(script, net.PARENT) is not None
                ))
            except:
                return json.dumps(None)
        
        def get_global_stats():
            # averaged over last hour
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return json.dumps(None)
            
            nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            return json.dumps(dict(
                pool_nonstale_hash_rate=nonstale_hash_rate,
                pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
                pool_stale_prop=stale_prop,
            ))
        
        def get_local_stats():
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return json.dumps(None)
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            
            my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
            my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
            my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
            my_share_count = my_unstale_count + my_orphan_count + my_doa_count
            my_stale_count = my_orphan_count + my_doa_count
            
            my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
            
            my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
                for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
                if share.hash in my_share_hashes)
            actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
                tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
            share_att_s = my_work / actual_time
            
            miner_hash_rates = {}
            miner_dead_hash_rates = {}
            datums, dt = local_rate_monitor.get_datums_in_last()
            for datum in datums:
                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
                if datum['dead']:
                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
            
            return json.dumps(dict(
                my_hash_rates_in_last_hour=dict(
                    note="DEPRECATED",
                    nonstale=share_att_s,
                    rewarded=share_att_s/(1 - global_stale_prop),
                    actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
                ),
                my_share_counts_in_last_hour=dict(
                    shares=my_share_count,
                    unstale_shares=my_unstale_count,
                    stale_shares=my_stale_count,
                    orphan_stale_shares=my_orphan_count,
                    doa_stale_shares=my_doa_count,
                ),
                my_stale_proportions_in_last_hour=dict(
                    stale=my_stale_prop,
                    orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                    dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
                ),
                miner_hash_rates=miner_hash_rates,
                miner_dead_hash_rates=miner_dead_hash_rates,
            ))
        
        def get_peer_addresses():
            return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
        
        def get_uptime():
            return json.dumps(time.time() - start_time)
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type, *fields):
                resource.Resource.__init__(self)
                self.func, self.mime_type, self.fields = func, mime_type, fields
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                request.setHeader('Access-Control-Allow-Origin', '*')
                return self.func(*(request.args[field][0] for field in self.fields))
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
        web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        new_root = resource.Resource()
        web_root.putChild('web', new_root)
        
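        # Rolling 24-hour log of pool and local statistics, updated every five
        # minutes and persisted to disk.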
        stat_log = []
        if os.path.exists(os.path.join(datadir_path, 'stats')):
            try:
                with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                    stat_log = json.loads(f.read())
            except:
                log.err(None, 'Error loading stats:')
        def update_stat_log():
            while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
                stat_log.pop(0)
            
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
            
            miner_hash_rates = {}
            miner_dead_hash_rates = {}
            datums, dt = local_rate_monitor.get_datums_in_last()
            for datum in datums:
                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
                if datum['dead']:
                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
            
            stat_log.append(dict(
                time=time.time(),
                pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
                pool_stale_prop=global_stale_prop,
                local_hash_rates=miner_hash_rates,
                local_dead_hash_rates=miner_dead_hash_rates,
                shares=shares,
                stale_shares=stale_orphan_shares + stale_doa_shares,
                stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
                current_payout=get_current_txouts().get(my_script, 0)*1e-8,
            ))
            
            with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
                f.write(json.dumps(stat_log))
        task.LoopingCall(update_stat_log).start(5*60)
        new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
        
        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
        web_root.putChild('graphs', grapher.get_resource())
        def add_point():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return
            nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
            poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
            grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
        task.LoopingCall(add_point).start(100)
        
        def attempt_listen():
            try:
                reactor.listenTCP(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
            except error.CannotListenError, e:
                print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
                reactor.callLater(1, attempt_listen)
            else:
                with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
                    pass
        attempt_listen()
        
        print '    ...success!'
        print
        
        
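        # Refresh work every 15 seconds, or immediately when the P2P
        # connection announces a new block.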
        @defer.inlineCallbacks
        def work_poller():
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
        
        
        # done!
        print 'Started successfully!'
        print
        
        
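        # Watchdog: rearm a 30-second alarm every second; if the reactor
        # stalls long enough for it to fire, dump a stack trace to stderr.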
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100),)
                channel = '#p2pool' if net.NAME == 'bitcoin' else '#p2pool-alt'
                def lineReceived(self, line):
                    print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)
                    self.watch_id = tracker.verified.added.watch(self._new_share)
                    self.announced_hashes = set()
                    self.delayed_messages = {}
                def privmsg(self, user, channel, message):
                    if channel == self.channel and message in self.delayed_messages:
                        self.delayed_messages.pop(message).cancel()
                def _new_share(self, share):
                    if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes and abs(share.timestamp - time.time()) < 10*60:
                        self.announced_hashes.add(share.header_hash)
                        message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.share_data['new_script'], net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                        self.delayed_messages[message] = reactor.callLater(random.expovariate(1/5), lambda: (self.say(self.channel, message), self.delayed_messages.pop(message)))
                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
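        # Periodically print a status summary: share chain state, local and
        # pool hash rates, stale rates, and expected times to share/block.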
934         @defer.inlineCallbacks
935         def status_thread():
936             last_str = None
937             last_time = 0
938             while True:
939                 yield deferral.sleep(3)
940                 try:
941                     if time.time() > current_work2.value['last_update'] + 60:
942                         print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - current_work2.value['last_update']),)
943                     
944                     height = tracker.get_height(current_work.value['best_share_hash'])
945                     this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
946                         height,
947                         len(tracker.verified.shares),
948                         len(tracker.shares),
949                         len(p2p_node.peers),
950                         sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
951                     ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
952                     
953                     datums, dt = local_rate_monitor.get_datums_in_last()
954                     my_att_s = sum(datum['work']/dt for datum in datums)
955                     this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
956                         math.format(int(my_att_s)),
957                         math.format_dt(dt),
958                         math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
959                         math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].target / my_att_s) if my_att_s else '???',
960                     )
961                     
962                     if height > 2:
963                         (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
964                         stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
965                         real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
966                         
967                         this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
968                             shares, stale_orphan_shares, stale_doa_shares,
969                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
970                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
971                             get_current_txouts().get(my_script, 0)*1e-8, net.PARENT.SYMBOL,
972                         )
973                         this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
974                             math.format(int(real_att_s)),
975                             100*stale_prop,
976                             math.format_dt(2**256 / current_work.value['bits'].target / real_att_s),
977                         )
978                     
979                     if this_str != last_str or time.time() > last_time + 15:
980                         print this_str
981                         last_str = this_str
982                         last_time = time.time()
983                 except:
984                     log.err()
985         status_thread()
986     except:
987         log.err(None, 'Fatal error:')
988         reactor.stop()
989
990 def run():
991     class FixedArgumentParser(argparse.ArgumentParser):
992         def _read_args_from_files(self, arg_strings):
993             # expand arguments referencing files
994             new_arg_strings = []
995             for arg_string in arg_strings:
996                 
997                 # for regular arguments, just add them back into the list
998                 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
999                     new_arg_strings.append(arg_string)
1000                 
1001                 # replace arguments referencing files with the file content
1002                 else:
1003                     try:
1004                         args_file = open(arg_string[1:])
1005                         try:
1006                             arg_strings = []
1007                             for arg_line in args_file.read().splitlines():
1008                                 for arg in self.convert_arg_line_to_args(arg_line):
1009                                     arg_strings.append(arg)
1010                             arg_strings = self._read_args_from_files(arg_strings)
1011                             new_arg_strings.extend(arg_strings)
1012                         finally:
1013                             args_file.close()
1014                     except IOError:
1015                         err = sys.exc_info()[1]
1016                         self.error(str(err))
1017             
1018             # return the modified argument list
1019             return new_arg_strings
1020         
1021         def convert_arg_line_to_args(self, arg_line):
1022             return arg_line.split() # str.split() with no arguments never yields empty strings, so no further filtering is needed
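            # Unlike argparse's default convert_arg_line_to_args, which treats each
            # line of an @file as a single argument, this splits on whitespace, e.g.
            # (illustrative):
            #   "--net bitcoin --give-author 0" -> ['--net', 'bitcoin', '--give-author', '0']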
1023     
1024     
1025     realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
1026     
1027     parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
1028     parser.add_argument('--version', action='version', version=p2pool.__version__)
1029     parser.add_argument('--net',
1030         help='use specified network (default: bitcoin)',
1031         action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
1032     parser.add_argument('--testnet',
1033         help='''use the network's testnet''',
1034         action='store_const', const=True, default=False, dest='testnet')
1035     parser.add_argument('--debug',
1036         help='enable debugging mode',
1037         action='store_const', const=True, default=False, dest='debug')
1038     parser.add_argument('-a', '--address',
1039         help='generate payouts to this address (default: <address requested from bitcoind>)',
1040         type=str, action='store', default=None, dest='address')
1041     parser.add_argument('--datadir',
1042         help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
1043         type=str, action='store', default=None, dest='datadir')
1044     parser.add_argument('--logfile',
1045         help='''log to this file (default: data/<NET>/log)''',
1046         type=str, action='store', default=None, dest='logfile')
1047     parser.add_argument('--merged',
1048         help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
1049         type=str, action='append', default=[], dest='merged_urls')
1050     parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
1051         help='donate this percentage of work to the author of p2pool (default: 0.5)',
1052         type=float, action='store', default=0.5, dest='donation_percentage')
1053     parser.add_argument('--irc-announce',
1054         help='announce any blocks found on irc://irc.freenode.net/#p2pool',
1055         action='store_true', default=False, dest='irc_announce')
1056     
1057     p2pool_group = parser.add_argument_group('p2pool interface')
1058     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
1059         help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
1060         type=int, action='store', default=None, dest='p2pool_port')
1061     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
1062         help='''connect to an existing p2pool node at ADDR listening on port PORT (PORT defaults to the network's p2pool P2P port) in addition to the built-in addresses''',
1063         type=str, action='append', default=[], dest='p2pool_nodes')
1064     p2pool_group.add_argument('--disable-upnp',
1065         help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
1066         action='store_false', default=True, dest='upnp')
1067     
1068     worker_group = parser.add_argument_group('worker interface')
1069     worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
1070         help='listen on PORT on the interface with address ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
1071         type=str, action='store', default=None, dest='worker_endpoint')
1072     worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
1073         help='''charge workers who mine to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee for mining on your p2pool instance. The amount charged is displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
1074         type=float, action='store', default=0, dest='worker_fee')
1075     
1076     bitcoind_group = parser.add_argument_group('bitcoind interface')
1077     bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
1078         help='connect to this address (default: 127.0.0.1)',
1079         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
1080     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
1081         help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
1082         type=int, action='store', default=None, dest='bitcoind_rpc_port')
1083     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
1084         help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
1085         type=int, action='store', default=None, dest='bitcoind_p2p_port')
1086     
1087     bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
1088         help='bitcoind RPC interface username, then password, space-separated (providing only one will cause the username to default to empty, and providing neither will cause P2Pool to read them from bitcoin.conf)',
1089         type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
1090     
1091     args = parser.parse_args()
1092     
1093     if args.debug:
1094         p2pool.DEBUG = True
1095     
1096     net_name = args.net_name + ('_testnet' if args.testnet else '')
1097     net = networks.nets[net_name]
1098     
1099     datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
1100     if not os.path.exists(datadir_path):
1101         os.makedirs(datadir_path)
1102     
1103     if len(args.bitcoind_rpc_userpass) > 2:
1104         parser.error('a maximum of two arguments is allowed')
1105     args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
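    # The left-padding idiom above maps 0, 1, or 2 positional values onto
    # (username, password), e.g. (illustrative):
    #   []               -> (None, None)    # both read from bitcoin.conf below
    #   ['pass']         -> (None, 'pass')  # username later defaults to ''
    #   ['user', 'pass'] -> ('user', 'pass')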
1106     
1107     if args.bitcoind_rpc_password is None:
1108         if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
1109             parser.error('This network has no configuration file function. Manually enter your RPC password.')
1110         conf_path = net.PARENT.CONF_FILE_FUNC()
1111         if not os.path.exists(conf_path):
1112             parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
1113                 '''If you haven't yet created a configuration file, you should create one at %s with the text:\r\n'''
1114                 '''\r\n'''
1115                 '''server=1\r\n'''
1116                 '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
1117         with open(conf_path, 'rb') as f:
1118             cp = ConfigParser.RawConfigParser()
1119             cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
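            # bitcoin.conf is a flat key=value file with no section header, which
            # RawConfigParser cannot parse directly, so a dummy '[x]' section is
            # prepended above. A conf file that the loop below would pick up might
            # look like (values illustrative only):
            #   rpcuser=bitcoinrpc
            #   rpcpassword=<long random string>
            #   rpcport=8332
            #   port=8333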
1120             for conf_name, var_name, var_type in [
1121                 ('rpcuser', 'bitcoind_rpc_username', str),
1122                 ('rpcpassword', 'bitcoind_rpc_password', str),
1123                 ('rpcport', 'bitcoind_rpc_port', int),
1124                 ('port', 'bitcoind_p2p_port', int),
1125             ]:
1126                 if getattr(args, var_name) is None and cp.has_option('x', conf_name):
1127                     setattr(args, var_name, var_type(cp.get('x', conf_name)))
1128     
1129     if args.bitcoind_rpc_username is None:
1130         args.bitcoind_rpc_username = ''
1131     
1132     if args.bitcoind_rpc_port is None:
1133         args.bitcoind_rpc_port = net.PARENT.RPC_PORT
1134     
1135     if args.bitcoind_p2p_port is None:
1136         args.bitcoind_p2p_port = net.PARENT.P2P_PORT
1137     
1138     if args.p2pool_port is None:
1139         args.p2pool_port = net.P2P_PORT
1140     
1141     if args.worker_endpoint is None:
1142         worker_endpoint = '', net.WORKER_PORT
1143     elif ':' not in args.worker_endpoint:
1144         worker_endpoint = '', int(args.worker_endpoint)
1145     else:
1146         addr, port = args.worker_endpoint.rsplit(':', 1)
1147         worker_endpoint = addr, int(port)
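    # Summary of the --worker-port forms handled above (values illustrative):
    #   unset             -> ('', net.WORKER_PORT)  (all interfaces, default port)
    #   '9332'            -> ('', 9332)
    #   '127.0.0.1:9332'  -> ('127.0.0.1', 9332)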
1148     
1149     if args.address is not None:
1150         try:
1151             args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
1152         except Exception, e:
1153             parser.error('error parsing address: ' + repr(e))
1154     else:
1155         args.pubkey_hash = None
1156     
1157     def separate_url(url):
1158         s = urlparse.urlsplit(url)
1159         if '@' not in s.netloc:
1160             parser.error('merged url netloc must contain an "@"')
1161         userpass, new_netloc = s.netloc.rsplit('@', 1)
1162         return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
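    # separate_url splits the credentials out of a merged-mining URL so the two
    # parts can be handled separately, e.g. (example values from the --merged
    # help text):
    #   'http://ncuser:ncpass@127.0.0.1:10332/'
    #       -> ('http://127.0.0.1:10332/', 'ncuser:ncpass')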
1163     merged_urls = map(separate_url, args.merged_urls)
1164     
1165     if args.logfile is None:
1166         args.logfile = os.path.join(datadir_path, 'log')
1167     
1168     logfile = logging.LogFile(args.logfile)
1169     pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
1170     sys.stdout = logging.AbortPipe(pipe)
1171     sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
1172     if hasattr(signal, "SIGUSR1"):
1173         def sigusr1(signum, frame):
1174             print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
1175             logfile.reopen()
1176             print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
1177         signal.signal(signal.SIGUSR1, sigusr1)
1178     task.LoopingCall(logfile.reopen).start(5)
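    # Reopening the log file every five seconds (and on SIGUSR1 above) means an
    # externally rotated or moved log file is picked up automatically; presumably
    # this is to cooperate with logrotate-style tools.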
1179     
1180     reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
1181     reactor.run()