# [p2pool.git] / p2pool / main.py
# commit: added .new_script back to Share
from __future__ import division

import ConfigParser
import StringIO
import argparse
import os
import random
import struct
import sys
import time
import json
import signal
import traceback
import urlparse

from twisted.internet import defer, error, reactor, protocol, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
from . import p2p, networks, graphs
import p2pool, p2pool.data as p2pool_data

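# getwork: fetch a block template via bitcoind's getmemorypool RPC and
# normalize it into the dict consumed by the rest of this module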
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    try:
        work = yield bitcoind.rpc_getmemorypool()
    except jsonrpc.Error, e:
        if e.code == -32601: # Method not found
            print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
            raise deferral.RetrySilentlyException()
        raise
    packed_transactions = [x.decode('hex') for x in work['transactions']]
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
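        # merkle branch for index 0; the leading 0 is a placeholder for the
        # coinbase transaction, which is generated per-worker later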
        merkle_branch=bitcoin_data.calculate_merkle_branch([0] + map(bitcoin_data.hash256, packed_transactions), 0),
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
    ))

@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
        if not good:
            print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        print 'Determining payout address...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            
            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            
            with open(address_path, 'wb') as f:
                f.write(address)
            
            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        else:
            my_pubkey_hash = args.pubkey_hash
        print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
        print
        
        my_share_hashes = set()
        my_doa_share_hashes = set()
        
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        recent_blocks = []
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_merged_work = variable.Variable({})
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        requested = expiring_dict.ExpiringDict(300)
        
        print 'Initializing work...'
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                merkle_branch=work['merkle_branch'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # current_work2 must be set first, because watchers of pre_current_work read it
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                bits=work['bits'],
                coinbaseflags=work['coinbaseflags'],
            ))
        yield set_real_work1()
        
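        # prefer fetching block heights over RPC (getblock) when bitcoind offers it;
        # otherwise fall back to tracking headers over the P2P connection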
        if '\ngetblock ' in (yield deferral.retry()(bitcoind.rpc_help)()):
            height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
            best_height_cached = variable.Variable((yield deferral.retry()(height_cacher)(pre_current_work.value['previous_block'])))
            def get_height_rel_highest(block_hash):
                this_height = height_cacher.call_now(block_hash, 0)
                best_height = height_cacher.call_now(pre_current_work.value['previous_block'], 0)
                best_height_cached.set(max(best_height_cached.value, this_height, best_height))
                return this_height - best_height_cached.value
        else:
            get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory, 5*net.SHARE_PERIOD*net.CHAIN_LENGTH/net.PARENT.BLOCK_PERIOD).get_height_rel_highest
        
        def set_real_work2():
            best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['mm_chains'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # received while tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
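                # exponential backoff: don't re-request a share until 10 * 1.5**count seconds have passed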
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        pre_current_work.changed.watch(lambda _: set_real_work2())
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        set_real_work2()
        print '    ...success!'
        print
        
        
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.Proxy(merged_url, (merged_userpass,))
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
                pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # setup p2p logic and join p2pool network
        
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
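                # cap the reply at roughly 1000 shares total across all requested hashes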
                parents = min(parents, 1000//len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                peer.sendShares(shares)
        
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker))
                else:
                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                print
                recent_blocks.append(dict(ts=share.timestamp, hash='%064x' % (share.header_hash,)))
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
            try:
                addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
            except:
                print >>sys.stderr, "error reading addrs"
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        p2p_node = Node(
            best_share_hash_func=lambda: current_work.value['best_share_hash'],
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
        )
        p2p_node.start()
        
        def save_addrs():
            open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
        task.LoopingCall(save_addrs).start(60)
        
        # when the best share chain changes, send peers any new shares along it
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        start_time = time.time()
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
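                # renew the port mapping at random intervals averaging two minutes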
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        
        if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
            with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
                vip_pass = f.read().strip('\r\n')
        else:
            vip_pass = '%016x' % (random.randrange(2**64),)
            with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
                f.write(vip_pass)
        print '    Worker password:', vip_pass, '(only required for generating graphs)'
        
        # setup worker logic
        
        removed_unstales_var = variable.Variable((0, 0, 0))
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
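                # stale_info values: 0 = on-time, 253 = orphaned, 254 = dead on arrival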
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
        
        removed_doa_unstales_var = variable.Variable(0)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
        
        def get_stale_counts():
            '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
            my_shares = len(my_share_hashes)
            my_doa_shares = len(my_doa_share_hashes)
            delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
            my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
            my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
            orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
            doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
            
            my_shares_not_in_chain = my_shares - my_shares_in_chain
            my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
            
            return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
        
        
        local_rate_monitor = math.RateMonitor(10*60)
        
        class WorkerBridge(worker_interface.WorkerBridge):
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                self.recent_shares_ts_work = []
            
            def _get_payout_pubkey_hash_from_username(self, user):
                if user is None:
                    return None
                try:
                    return bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                except: # XXX blah
                    return None
            
            def preprocess_request(self, request):
                payout_pubkey_hash = self._get_payout_pubkey_hash_from_username(request.getUser())
                if payout_pubkey_hash is None or random.uniform(0, 100) < args.worker_fee:
                    payout_pubkey_hash = my_pubkey_hash
                return payout_pubkey_hash,
            
            def get_work(self, pubkey_hash):
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
                if current_work.value['mm_chains']:
                    tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
                    mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
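                    # merged-mining commitment: magic bytes '\xfa\xbemm' followed by
                    # the aux chain merkle root, tree size, and nonce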
                    mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                        merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                        size=size,
                        nonce=0,
                    ))
                    mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
                else:
                    mm_data = ''
                    mm_later = []
                
                share_info, generate_tx = p2pool_data.generate_transaction(
                    tracker=tracker,
                    share_data=dict(
                        previous_share_hash=current_work.value['best_share_hash'],
                        coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
                        nonce=struct.pack('<Q', random.randrange(2**64)),
                        new_script=bitcoin_data.pubkey_hash_to_script2(pubkey_hash),
                        subsidy=current_work2.value['subsidy'],
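                        # donation is encoded as a fraction of 65535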
                        donation=math.perfect_round(65535*args.donation_percentage/100),
                        stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                            253 if orphans > orphans_recorded_in_chain else
                            254 if doas > doas_recorded_in_chain else
                            0
                        )(*get_stale_counts()),
                    ),
                    block_target=current_work.value['bits'].target,
                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                    net=net,
                )
                
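                # pseudoshare target: start from the easiest allowed target, then
                # tighten it so this miner submits roughly one result every five
                # seconds once 50 recent results have been collected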
                target = net.PARENT.SANE_MAX_TARGET
                if len(self.recent_shares_ts_work) == 50:
                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                    target = min(target, 2**256//(hash_rate * 5))
                target = max(target, share_info['bits'].target)
                for aux_work in current_work.value['mm_chains'].itervalues():
                    target = max(target, aux_work['target'])
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
                merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(generate_tx)), 0, current_work2.value['merkle_branch'])
                
                getwork_time = time.time()
                merkle_branch = current_work2.value['merkle_branch']
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                ba = bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
                
                received_header_hashes = set()
                
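                # a single proof-of-work submission is checked against several targets:
                # the block target (submit to bitcoind), each aux chain's target
                # (merged-mining submission), the share target (extend the share
                # chain), and the pseudoshare target (local rate statistics)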
                def got_response(header, request):
                    assert header['merkle_root'] == merkle_root
                    
                    header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
                    pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
                    on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                    
                    try:
                        if pow_hash <= header['bits'].target or p2pool.DEBUG:
                            @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
                            def submit_block():
                                if factory.conn.value is None:
                                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                                    raise deferral.RetrySilentlyException()
                                factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                            submit_block()
                            if pow_hash <= header['bits'].target:
                                print
                                print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                                print
                                recent_blocks.append(dict(ts=time.time(), hash='%064x' % (header_hash,)))
                    except:
                        log.err(None, 'Error while processing potential block:')
                    
                    for aux_work, index, hashes in mm_later:
                        try:
                            if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                                df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                    pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                                    bitcoin_data.aux_pow_type.pack(dict(
                                        merkle_tx=dict(
                                            tx=transactions[0],
                                            block_hash=header_hash,
                                            merkle_branch=merkle_branch,
                                            index=0,
                                        ),
                                        merkle_branch=bitcoin_data.calculate_merkle_branch(hashes, index),
                                        index=index,
                                        parent_block_header=header,
                                    )).encode('hex'),
                                )
                                @df.addCallback
                                def _(result):
                                    if result != (pow_hash <= aux_work['target']):
                                        print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                                    else:
                                        print 'Merged block submittal result: %s' % (result,)
                                @df.addErrback
                                def _(err):
                                    log.err(err, 'Error submitting merged block:')
                        except:
                            log.err(None, 'Error while processing merged mining POW:')
                    
                    if pow_hash <= share_info['bits'].target:
                        share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                        print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                            request.getUser(),
                            p2pool_data.format_hash(share.hash),
                            p2pool_data.format_hash(share.previous_hash),
                            time.time() - getwork_time,
                            ' DEAD ON ARRIVAL' if not on_time else '',
                        )
                        my_share_hashes.add(share.hash)
                        if not on_time:
                            my_doa_share_hashes.add(share.hash)
                        
                        tracker.add(share)
                        if not p2pool.DEBUG:
                            tracker.verified.add(share)
                        set_real_work2()
                        
                        try:
                            if pow_hash <= header['bits'].target or p2pool.DEBUG:
                                for peer in p2p_node.peers.itervalues():
                                    peer.sendShares([share])
                                shared_share_hashes.add(share.hash)
                        except:
                            log.err(None, 'Error forwarding block solution:')
                    
                    if pow_hash <= target and header_hash not in received_header_hashes:
                        reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
                        if request.getPassword() == vip_pass:
                            reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
                        self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                        while len(self.recent_shares_ts_work) > 50:
                            self.recent_shares_ts_work.pop(0)
                        local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
                    
                    if header_hash in received_header_hashes:
                        print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
                    received_header_hashes.add(header_hash)
                    
                    if pow_hash > target:
                        print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
                        print '    Hash:   %56x' % (pow_hash,)
                        print '    Target: %56x' % (target,)
                    
                    return on_time
                
                return ba, got_response
        
        web_root = resource.Resource()
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
        
        def get_rate():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return json.dumps(None)
            return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
            return json.dumps(res)
        
        def get_current_txouts():
            share = tracker.shares[current_work.value['best_share_hash']]
            share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
            return dict((out['script'], out['value']) for out in gentx['tx_outs'])
        
        def get_current_scaled_txouts(scale, trunc=0):
            txouts = get_current_txouts()
            total = sum(txouts.itervalues())
            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
            if trunc > 0:
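                # payouts below the truncation threshold are merged: one of them,
                # chosen with probability proportional to value, receives their
                # combined amount, preserving each miner's expected payout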
                total_random = 0
                random_set = set()
                for s in sorted(results, key=results.__getitem__):
                    if results[s] >= trunc:
                        break
                    total_random += results[s]
                    random_set.add(s)
                if total_random:
                    winner = math.weighted_choice((script, results[script]) for script in random_set)
                    for script in random_set:
                        del results[script]
                    results[winner] = total_random
            if sum(results.itervalues()) < int(scale):
                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
            return results
        
        def get_current_payouts():
            return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
        
        def get_patron_sendmany(this):
            try:
                if '/' in this:
                    this, trunc = this.split('/', 1)
                else:
                    trunc = '0.01'
                return json.dumps(dict(
                    (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
                    for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
                    if bitcoin_data.script2_to_address(script, net.PARENT) is not None
                ))
            except:
                return json.dumps(None)
        
        def get_global_stats():
            # averaged over last hour
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            return json.dumps(dict(
                pool_nonstale_hash_rate=nonstale_hash_rate,
                pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
                pool_stale_prop=stale_prop,
            ))
        
        def get_local_stats():
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            
            my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
            my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
            my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
            my_share_count = my_unstale_count + my_orphan_count + my_doa_count
            my_stale_count = my_orphan_count + my_doa_count
            
            my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
            
            my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
                for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
                if share.hash in my_share_hashes)
            actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
                tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
            share_att_s = my_work / actual_time
            
            miner_hash_rates = {}
            miner_dead_hash_rates = {}
            datums, dt = local_rate_monitor.get_datums_in_last()
            for datum in datums:
                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
                if datum['dead']:
                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
            
            return json.dumps(dict(
                my_hash_rates_in_last_hour=dict(
                    note="DEPRECATED",
                    nonstale=share_att_s,
                    rewarded=share_att_s/(1 - global_stale_prop),
                    actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
                ),
                my_share_counts_in_last_hour=dict(
                    shares=my_share_count,
                    unstale_shares=my_unstale_count,
                    stale_shares=my_stale_count,
                    orphan_stale_shares=my_orphan_count,
                    doa_stale_shares=my_doa_count,
                ),
                my_stale_proportions_in_last_hour=dict(
                    stale=my_stale_prop,
                    orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                    dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
                ),
                miner_hash_rates=miner_hash_rates,
                miner_dead_hash_rates=miner_dead_hash_rates,
            ))
        
        def get_peer_addresses():
            return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
        
        def get_uptime():
            return json.dumps(time.time() - start_time)
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type, *fields):
                resource.Resource.__init__(self) # initialize Resource's child-lookup state
                self.func, self.mime_type, self.fields = func, mime_type, fields
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                request.setHeader('Access-Control-Allow-Origin', '*')
                return self.func(*(request.args[field][0] for field in self.fields))
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)), 'application/json'))
        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
        web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        new_root = resource.Resource()
        web_root.putChild('web', new_root)
        
        stat_log = []
        if os.path.exists(os.path.join(datadir_path, 'stats')):
            try:
                with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                    stat_log = json.loads(f.read())
            except:
                log.err(None, 'Error loading stats:')
        def update_stat_log():
            while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
                stat_log.pop(0)
            
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
            
            miner_hash_rates = {}
            miner_dead_hash_rates = {}
            datums, dt = local_rate_monitor.get_datums_in_last()
            for datum in datums:
                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
                if datum['dead']:
                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
            
            stat_log.append(dict(
                time=time.time(),
                pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
                pool_stale_prop=global_stale_prop,
                local_hash_rates=miner_hash_rates,
                local_dead_hash_rates=miner_dead_hash_rates,
                shares=shares,
                stale_shares=stale_orphan_shares + stale_doa_shares,
                stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
                current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8,
            ))
            
            with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
                f.write(json.dumps(stat_log))
        task.LoopingCall(update_stat_log).start(5*60)
        new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
        
        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
        web_root.putChild('graphs', grapher.get_resource())
        def add_point():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return
            nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
            poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
            grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
        task.LoopingCall(add_point).start(100)
        
        def attempt_listen():
            try:
                reactor.listenTCP(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
            except error.CannotListenError, e:
                print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
                reactor.callLater(1, attempt_listen)
            else:
                with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
                    pass
        attempt_listen()
        
        print '    ...success!'
        print
        
        
        @defer.inlineCallbacks
        def work_poller():
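            # refresh work whenever bitcoind announces a new block, or at least every 15 seconds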
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
        
        
        # done!
        print 'Started successfully!'
        print
        
        
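        # watchdog: re-arm a 30-second alarm every second; if the reactor stalls
        # long enough for the alarm to fire, SIGALRM prints a stack trace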
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100),)
                channel = '#p2pool' if net.NAME == 'bitcoin' else '#p2pool-alt'
                def lineReceived(self, line):
                    print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)
                    self.watch_id = tracker.verified.added.watch(self._new_share)
                    self.announced_hashes = set()
                    self.delayed_messages = {}
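                # privmsg below cancels a pending announcement if another node announces the same block first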
917                 def privmsg(self, user, channel, message):
918                     if channel == self.channel and message in self.delayed_messages:
919                         self.delayed_messages.pop(message).cancel()
920                 def _new_share(self, share):
921                     if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes and abs(share.timestamp - time.time()) < 10*60:
922                         self.announced_hashes.add(share.header_hash)
923                         message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
924                         self.delayed_messages[message] = reactor.callLater(random.expovariate(1/5), lambda: (self.say(self.channel, message), self.delayed_messages.pop(message)))
925                 def connectionLost(self, reason):
926                     tracker.verified.added.unwatch(self.watch_id)
927                     print 'IRC connection lost:', reason.getErrorMessage()
928             class IRCClientFactory(protocol.ReconnectingClientFactory):
929                 protocol = IRCClient
930             reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
931         
932         @defer.inlineCallbacks
933         def status_thread():
934             last_str = None
935             last_time = 0
936             while True:
937                 yield deferral.sleep(3)
938                 try:
939                     if time.time() > current_work2.value['last_update'] + 60:
940                         print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - current_work2.value['last_update']),)
941                     
942                     height = tracker.get_height(current_work.value['best_share_hash'])
943                     this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
944                         height,
945                         len(tracker.verified.shares),
946                         len(tracker.shares),
947                         len(p2p_node.peers),
948                         sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
949                     ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
950                     
951                     datums, dt = local_rate_monitor.get_datums_in_last()
952                     my_att_s = sum(datum['work']/dt for datum in datums)
953                     this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
954                         math.format(int(my_att_s)),
955                         math.format_dt(dt),
956                         math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
957                         math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].target / my_att_s) if my_att_s else '???',
958                     )
959                     
960                     if height > 2:
961                         (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
962                         stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
963                         real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
964                         
965                         this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
966                             shares, stale_orphan_shares, stale_doa_shares,
967                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
968                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
969                             get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
970                         )
971                         this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
972                             math.format(int(real_att_s)),
973                             100*stale_prop,
974                             math.format_dt(2**256 / current_work.value['bits'].target / real_att_s),
975                         )
976                     
977                     if this_str != last_str or time.time() > last_time + 15:
978                         print this_str
979                         last_str = this_str
980                         last_time = time.time()
981                 except: # log the error but keep the status loop running
982                     log.err()
983         status_thread()
984     except:
985         log.err(None, 'Fatal error:')
986         reactor.stop()
987
988 def run():
989     class FixedArgumentParser(argparse.ArgumentParser):
990         def _read_args_from_files(self, arg_strings):
991             # expand arguments referencing files
992             new_arg_strings = []
993             for arg_string in arg_strings:
994                 
995                 # for regular arguments, just add them back into the list
996                 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
997                     new_arg_strings.append(arg_string)
998                 
999                 # replace arguments referencing files with the file content
1000                 else:
1001                     try:
1002                         args_file = open(arg_string[1:])
1003                         try:
1004                             arg_strings = []
1005                             for arg_line in args_file.read().splitlines():
1006                                 for arg in self.convert_arg_line_to_args(arg_line):
1007                                     arg_strings.append(arg)
1008                             arg_strings = self._read_args_from_files(arg_strings)
1009                             new_arg_strings.extend(arg_strings)
1010                         finally:
1011                             args_file.close()
1012                     except IOError:
1013                         err = sys.exc_info()[1]
1014                         self.error(str(err))
1015             
1016             # return the modified argument list
1017             return new_arg_strings
1018         
1019         def convert_arg_line_to_args(self, arg_line):
1020             return arg_line.split() # str.split() already drops empty tokens
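        # convert_arg_line_to_args is overridden because stock argparse treats
        # each line of an @-file as a single argument; splitting on whitespace
        # lets a config file hold "--flag value" pairs. An illustrative (not
        # shipped) config file and invocation, assuming a file named p2pool.conf:
        #
        #     --net bitcoin
        #     --give-author 1.0
        #     myrpcuser myrpcpass
        #
        #     $ python run_p2pool.py @p2pool.conf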
1021     
1022     
1023     realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
1024     
1025     parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
1026     parser.add_argument('--version', action='version', version=p2pool.__version__)
1027     parser.add_argument('--net',
1028         help='use specified network (default: bitcoin)',
1029         action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
1030     parser.add_argument('--testnet',
1031         help='''use the network's testnet''',
1032         action='store_true', default=False, dest='testnet')
1033     parser.add_argument('--debug',
1034         help='enable debugging mode',
1035         action='store_true', default=False, dest='debug')
1036     parser.add_argument('-a', '--address',
1037         help='generate payouts to this address (default: <address requested from bitcoind>)',
1038         type=str, action='store', default=None, dest='address')
1039     parser.add_argument('--datadir',
1040         help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
1041         type=str, action='store', default=None, dest='datadir')
1042     parser.add_argument('--logfile',
1043         help='''log to this file (default: data/<NET>/log)''',
1044         type=str, action='store', default=None, dest='logfile')
1045     parser.add_argument('--merged',
1046         help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
1047         type=str, action='append', default=[], dest='merged_urls')
1048     parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
1049         help='donate this percentage of work to the author of p2pool (default: 0.5)',
1050         type=float, action='store', default=0.5, dest='donation_percentage')
1051     parser.add_argument('--irc-announce',
1052         help='announce any blocks found on irc://irc.freenode.net/#p2pool',
1053         action='store_true', default=False, dest='irc_announce')
1054     
1055     p2pool_group = parser.add_argument_group('p2pool interface')
1056     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
1057         help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
1058         type=int, action='store', default=None, dest='p2pool_port')
1059     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
1060         help='''connect to an existing p2pool node at ADDR listening on port PORT (default: the network's p2pool P2P port), in addition to the built-in addresses''',
1061         type=str, action='append', default=[], dest='p2pool_nodes')
1062     p2pool_group.add_argument('--disable-upnp',
1063         help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
1064         action='store_false', default=True, dest='upnp')
1065     
1066     worker_group = parser.add_argument_group('worker interface')
1067     worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
1068         help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
1069         type=str, action='store', default=None, dest='worker_endpoint')
1070     worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
1071         help='''charge workers who mine to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee for mining on your p2pool instance. The current amount is displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
1072         type=float, action='store', default=0, dest='worker_fee')
1073     
1074     bitcoind_group = parser.add_argument_group('bitcoind interface')
1075     bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
1076         help='connect to this address (default: 127.0.0.1)',
1077         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
1078     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
1079         help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
1080         type=int, action='store', default=None, dest='bitcoind_rpc_port')
1081     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
1082         help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
1083         type=int, action='store', default=None, dest='bitcoind_p2p_port')
1084     
1085     bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
1086         help='bitcoind RPC interface username, then password, space-separated (if only one is provided, the username defaults to empty; if neither is provided, P2Pool reads both from bitcoin.conf)',
1087         type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
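    # Illustrative invocations (hypothetical credentials and host, not defaults):
    #     $ python run_p2pool.py myrpcuser myrpcpass
    #     $ python run_p2pool.py --net bitcoin --bitcoind-address 192.0.2.1 myrpcuser myrpcpass
    #     $ python run_p2pool.py            # reads rpcuser/rpcpassword from bitcoin.conf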
1088     
1089     args = parser.parse_args()
1090     
1091     if args.debug:
1092         p2pool.DEBUG = True
1093     
1094     net_name = args.net_name + ('_testnet' if args.testnet else '')
1095     net = networks.nets[net_name]
1096     
1097     datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
1098     if not os.path.exists(datadir_path):
1099         os.makedirs(datadir_path)
1100     
1101     if len(args.bitcoind_rpc_userpass) > 2:
1102         parser.error('at most two arguments are allowed')
1103     args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
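    # Left-padding with [None, None] and taking the last two elements maps
    # zero, one, or two positional arguments onto (username, password):
    #     []               -> (None, None)   both read from bitcoin.conf
    #     ['pass']         -> (None, 'pass') username defaults to '' below
    #     ['user', 'pass'] -> ('user', 'pass')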
1104     
1105     if args.bitcoind_rpc_password is None:
1106         if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
1107             parser.error('This network has no known configuration file location. Manually enter your RPC password.')
1108         conf_path = net.PARENT.CONF_FILE_FUNC()
1109         if not os.path.exists(conf_path):
1110             parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
1111                 '''If you haven't created a configuration file yet, create one at %s with the text:\r\n'''
1112                 '''\r\n'''
1113                 '''server=1\r\n'''
1114                 '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
1115         with open(conf_path, 'rb') as f:
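        # bitcoin.conf is a flat key=value file with no section headers, so a
        # dummy '[x]' section is prepended to satisfy RawConfigParser; command-
        # line values still win because only unset (None) options are filled in.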
1116             cp = ConfigParser.RawConfigParser()
1117             cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
1118             for conf_name, var_name, var_type in [
1119                 ('rpcuser', 'bitcoind_rpc_username', str),
1120                 ('rpcpassword', 'bitcoind_rpc_password', str),
1121                 ('rpcport', 'bitcoind_rpc_port', int),
1122                 ('port', 'bitcoind_p2p_port', int),
1123             ]:
1124                 if getattr(args, var_name) is None and cp.has_option('x', conf_name):
1125                     setattr(args, var_name, var_type(cp.get('x', conf_name)))
1126     
1127     if args.bitcoind_rpc_username is None:
1128         args.bitcoind_rpc_username = ''
1129     
1130     if args.bitcoind_rpc_port is None:
1131         args.bitcoind_rpc_port = net.PARENT.RPC_PORT
1132     
1133     if args.bitcoind_p2p_port is None:
1134         args.bitcoind_p2p_port = net.PARENT.P2P_PORT
1135     
1136     if args.p2pool_port is None:
1137         args.p2pool_port = net.P2P_PORT
1138     
1139     if args.worker_endpoint is None:
1140         worker_endpoint = '', net.WORKER_PORT
1141     elif ':' not in args.worker_endpoint:
1142         worker_endpoint = '', int(args.worker_endpoint)
1143     else:
1144         addr, port = args.worker_endpoint.rsplit(':', 1)
1145         worker_endpoint = addr, int(port)
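    # Illustrative values (hypothetical): '-w 9332' yields ('', 9332), i.e.
    # listen on all interfaces; '-w 127.0.0.1:9332' yields ('127.0.0.1', 9332).
    # rsplit(':', 1) splits on the last colon, so the port is always parsed.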
1146     
1147     if args.address is not None:
1148         try:
1149             args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
1150         except Exception, e:
1151             parser.error('error parsing address: ' + repr(e))
1152     else:
1153         args.pubkey_hash = None
1154     
1155     def separate_url(url):
1156         s = urlparse.urlsplit(url)
1157         if '@' not in s.netloc:
1158             parser.error('merged url netloc must contain an "@"')
1159         userpass, new_netloc = s.netloc.rsplit('@', 1)
1160         return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
1161     merged_urls = map(separate_url, args.merged_urls)
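    # separate_url strips the userinfo out of each --merged URL so credentials
    # aren't carried around inside the URL itself; e.g. (from the --merged help)
    # 'http://ncuser:ncpass@127.0.0.1:10332/' becomes
    # ('http://127.0.0.1:10332/', 'ncuser:ncpass').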
1162     
1163     if args.logfile is None:
1164         args.logfile = os.path.join(datadir_path, 'log')
1165     
1166     logfile = logging.LogFile(args.logfile)
1167     pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
1168     sys.stdout = logging.AbortPipe(pipe)
1169     sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
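    # From here on, everything written to stdout/stderr is timestamped and
    # tee'd to both the console and the logfile; stderr lines additionally get
    # a '> ' prefix so errors stand out when reading the log.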
1170     if hasattr(signal, "SIGUSR1"):
1171         def sigusr1(signum, frame):
1172             print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
1173             logfile.reopen()
1174             print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
1175         signal.signal(signal.SIGUSR1, sigusr1)
1176     task.LoopingCall(logfile.reopen).start(5)
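    # Reopening the logfile every 5 seconds (and on SIGUSR1, where the platform
    # has it) lets external log rotation work: if the file is moved or deleted,
    # a fresh one is created at the same path on the next reopen.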
1177     
1178     reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
1179     reactor.run()