handle work results by using saved closures instead of large tuples
p2pool/main.py
from __future__ import division

import ConfigParser
import StringIO
import argparse
import os
import random
import struct
import sys
import time
import json
import signal
import traceback
import urlparse

from twisted.internet import defer, error, reactor, protocol, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
from . import p2p, networks, graphs
import p2pool, p2pool.data as p2pool_data

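# getwork() normalizes bitcoind's getmemorypool response into the dict the rest
# of this module consumes. The conditionals on 'bits' and 'coinbaseflags' hedge
# against differing bitcoind versions/forks: 'bits' may arrive as a hex string
# or an integer, and coinbase flags may come as 'coinbaseflags' or as a
# 'coinbaseaux' mapping.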
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    work = yield bitcoind.rpc_getmemorypool()
    packed_transactions = [x.decode('hex') for x in work['transactions']]
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
        merkle_branch=bitcoin_data.calculate_merkle_branch([0] + map(bitcoin_data.hash256, packed_transactions), 0),
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
    ))

@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
        if not good:
            print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        print 'Determining payout script...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            
            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            
            with open(address_path, 'wb') as f:
                f.write(address)
            
            my_script = bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net.PARENT))
        else:
            print '    ...Computing payout script from provided address...'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success! Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
        print
        
        my_share_hashes = set()
        my_doa_share_hashes = set()
        
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        recent_blocks = []
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_merged_work = variable.Variable({})
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        requested = expiring_dict.ExpiringDict(300)
        
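        # Work flows in two stages: set_real_work1() polls bitcoind and fills
        # current_work2/pre_current_work with raw chain data, and
        # set_real_work2() (watching pre_current_work) merges tracker and
        # merged-mining state into current_work, which long-polling miners
        # watch.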
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                merkle_branch=work['merkle_branch'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # current_work2 is set first because everything hooks on pre_current_work and expects current_work2 to be fresh
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                bits=work['bits'],
                coinbaseflags=work['coinbaseflags'],
            ))
        
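        # Prefer bitcoind's getblock RPC for judging how far behind a block is;
        # fall back to tracking heights over the P2P connection when rpc_help()
        # doesn't advertise getblock.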
        if '\ngetblock ' in (yield bitcoind.rpc_help()):
            height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
            def get_height_rel_highest(block_hash):
                return height_cacher.call_now(block_hash, 0) - height_cacher.call_now(pre_current_work.value['previous_block'], 1000000000)
        else:
            get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory).get_height_rel_highest
        
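        # set_real_work2() also schedules fetches of missing parent shares. The
        # 'requested' dict implements exponential backoff: a hash is only
        # re-requested once outside the window ending 10*1.5**count seconds
        # after the last attempt, usually from a random peer that announced a
        # related head.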
        def set_real_work2():
            best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['mm_chains'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # share was received while tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        pre_current_work.changed.watch(lambda _: set_real_work2())
        
        print 'Initializing work...'
        yield set_real_work1()
        print '    ...success!'
        print
        
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        
        
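        # Merged mining: each --merged URL gets its own polling loop calling
        # getauxblock about once per second, recording that chain's block hash
        # and target under its chainid in pre_merged_work.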
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.Proxy(merged_url, (merged_userpass,))
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
                pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # setup p2p logic and join p2pool network
        
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
                parents = min(parents, 1000//len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                peer.sendShares(shares)
        
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker))
                else:
                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash)
                print
                recent_blocks.append({ 'ts': share.timestamp, 'hash': '%x' % (share.header_hash) })
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
            try:
                addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
            except:
                print >>sys.stderr, "error reading addrs"
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        p2p_node = Node(
            best_share_hash_func=lambda: current_work.value['best_share_hash'],
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
        )
        p2p_node.start()
        
        def save_addrs():
            open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
        task.LoopingCall(save_addrs).start(60)
        
        # broadcast our newest shares to peers whenever the best chain changes
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        start_time = time.time()
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
            with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
                vip_pass = f.read().strip('\r\n')
        else:
            vip_pass = '%016x' % (random.randrange(2**64),)
            with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
                f.write(vip_pass)
        print '    Worker password:', vip_pass, '(only required for generating graphs)'
        
        # setup worker logic
        
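        # Stale accounting: share_data['stale_info'] is 0 for an on-time share,
        # 253 for one announced as orphaned, and 254 for one announced
        # dead-on-arrival. The counters below preserve tallies for our own
        # shares that age out of the tracker.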
        removed_unstales_var = variable.Variable((0, 0, 0))
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
        
        removed_doa_unstales_var = variable.Variable(0)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)

        def get_stale_counts():
            '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
            my_shares = len(my_share_hashes)
            my_doa_shares = len(my_doa_share_hashes)
            delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
            my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
            my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
            orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
            doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
            
            my_shares_not_in_chain = my_shares - my_shares_in_chain
            my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
            
            return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
        
        
        recent_shares_ts_work2 = []
        
        class WorkerBridge(worker_interface.WorkerBridge):
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                self.recent_shares_ts_work = []
            
            def _get_payout_script_from_username(self, user):
                if user is None:
                    return None
                try:
                    pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                except: # XXX blah
                    return None
                return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
            
            def preprocess_request(self, request):
                payout_script = self._get_payout_script_from_username(request.getUser())
                if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                    payout_script = my_script
                return payout_script,
            
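            # preprocess_request returns a 1-tuple above; worker_interface is
            # assumed to unpack it as get_work's positional arguments, which is
            # how a miner's username (a bitcoin address) selects its payout
            # script.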
            def get_work(self, payout_script):
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
                if current_work.value['mm_chains']:
                    tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
                    mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
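                    # '\xfa\xbemm' below is the conventional merged-mining magic
                    # marker; the coinbase embeds a merkle root over all aux
                    # chain hashes so one parent block can prove work for
                    # several merged chains at once.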
                    mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                        merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                        size=size,
                        nonce=0,
                    ))
                    mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
                else:
                    mm_data = ''
                    mm_later = []
                
                share_info, generate_tx = p2pool_data.generate_transaction(
                    tracker=tracker,
                    share_data=dict(
                        previous_share_hash=current_work.value['best_share_hash'],
                        coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
                        nonce=struct.pack('<Q', random.randrange(2**64)),
                        new_script=payout_script,
                        subsidy=current_work2.value['subsidy'],
                        donation=math.perfect_round(65535*args.donation_percentage/100),
                        stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                            253 if orphans > orphans_recorded_in_chain else
                            254 if doas > doas_recorded_in_chain else
                            0
                        )(*get_stale_counts()),
                    ),
                    block_target=current_work.value['bits'].target,
                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                    net=net,
                )
                
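                # Pseudoshare target: start at roughly difficulty 1, then
                # tighten it so this worker submits about one proof every 5
                # seconds based on its measured hash rate, but clamp it so it is
                # never harder than the real share target or any aux chain's
                # target.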
                target = 2**256//2**32 - 1
                if len(self.recent_shares_ts_work) == 50:
                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                    target = min(target, 2**256//(hash_rate * 5))
                target = max(target, share_info['bits'].target)
                for aux_work in current_work.value['mm_chains'].itervalues():
                    target = max(target, aux_work['target'])
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
                merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(generate_tx)), 0, current_work2.value['merkle_branch'])
                
                getwork_time = time.time()
                merkle_branch = current_work2.value['merkle_branch']
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                ba = bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
                
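                # Per this commit, the state got_response needs (merkle_root,
                # transactions, merkle_branch, mm_later, share_info,
                # getwork_time) is captured by this closure rather than being
                # threaded around in one large work-result tuple.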
                def got_response(header, request):
                    assert header['merkle_root'] == merkle_root
                    
                    pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
                    on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                    
                    try:
                        if pow_hash <= header['bits'].target or p2pool.DEBUG:
                            if factory.conn.value is not None:
                                factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                            else:
                                print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                            if pow_hash <= header['bits'].target:
                                print
                                print 'GOT BLOCK FROM MINER! Passing to bitcoind! bitcoin: %x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),)
                                print
                                recent_blocks.append({ 'ts': time.time(), 'hash': '%x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),) })
                    except:
                        log.err(None, 'Error while processing potential block:')
                    
                    for aux_work, index, hashes in mm_later:
                        try:
                            if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                                df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                    pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                                    bitcoin_data.aux_pow_type.pack(dict(
                                        merkle_tx=dict(
                                            tx=transactions[0],
                                            block_hash=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),
                                            merkle_branch=merkle_branch,
                                            index=0,
                                        ),
                                        merkle_branch=bitcoin_data.calculate_merkle_branch(hashes, index),
                                        index=index,
                                        parent_block_header=header,
                                    )).encode('hex'),
                                )
                                @df.addCallback
                                def _(result):
                                    if result != (pow_hash <= aux_work['target']):
                                        print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                                    else:
                                        print 'Merged block submittal result: %s' % (result,)
                                @df.addErrback
                                def _(err):
                                    log.err(err, 'Error submitting merged block:')
                        except:
                            log.err(None, 'Error while processing merged mining POW:')
                    
                    if pow_hash <= share_info['bits'].target:
                        share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                        print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                            request.getUser(),
                            p2pool_data.format_hash(share.hash),
                            p2pool_data.format_hash(share.previous_hash),
                            time.time() - getwork_time,
                            ' DEAD ON ARRIVAL' if not on_time else '',
                        )
                        my_share_hashes.add(share.hash)
                        if not on_time:
                            my_doa_share_hashes.add(share.hash)
                        
                        tracker.add(share)
                        if not p2pool.DEBUG:
                            tracker.verified.add(share)
                        set_real_work2()
                        
                        try:
                            if pow_hash <= header['bits'].target or p2pool.DEBUG:
                                for peer in p2p_node.peers.itervalues():
                                    peer.sendShares([share])
                                shared_share_hashes.add(share.hash)
                        except:
                            log.err(None, 'Error forwarding block solution:')
                    
                    if pow_hash <= target:
                        reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
                        if request.getPassword() == vip_pass:
                            reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
                        self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                        while len(self.recent_shares_ts_work) > 50:
                            self.recent_shares_ts_work.pop(0)
                        recent_shares_ts_work2.append((time.time(), bitcoin_data.target_to_average_attempts(target), not on_time))
                    
                    
                    if pow_hash > target:
                        print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
                        print '    Hash:   %56x' % (pow_hash,)
                        print '    Target: %56x' % (target,)
                    
                    return on_time
                
                return ba, got_response
        
        web_root = resource.Resource()
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
        
        def get_rate():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return json.dumps(None)
            return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
            return json.dumps(res)
        
        def get_current_txouts():
            share = tracker.shares[current_work.value['best_share_hash']]
            share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
            return dict((out['script'], out['value']) for out in gentx['tx_outs'])
        
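        # trunc merges all payouts below the threshold into a single
        # weighted-random winner, which keeps every miner's expected payout the
        # same, presumably to avoid emitting dust outputs.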
        def get_current_scaled_txouts(scale, trunc=0):
            txouts = get_current_txouts()
            total = sum(txouts.itervalues())
            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
            if trunc > 0:
                total_random = 0
                random_set = set()
                for s in sorted(results, key=results.__getitem__):
                    if results[s] >= trunc:
                        break
                    total_random += results[s]
                    random_set.add(s)
                if total_random:
                    winner = math.weighted_choice((script, results[script]) for script in random_set)
                    for script in random_set:
                        del results[script]
                    results[winner] = total_random
            if sum(results.itervalues()) < int(scale):
                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
            return results
        
        def get_current_payouts():
            return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
        
        def get_patron_sendmany(this):
            try:
                if '/' in this:
                    this, trunc = this.split('/', 1)
                else:
                    trunc = '0.01'
                return json.dumps(dict(
                    (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
                    for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
                    if bitcoin_data.script2_to_address(script, net.PARENT) is not None
                ))
            except:
                return json.dumps(None)
        
        def get_global_stats():
            # averaged over last hour
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            return json.dumps(dict(
                pool_nonstale_hash_rate=nonstale_hash_rate,
                pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
                pool_stale_prop=stale_prop,
            ))
        
        def get_local_stats():
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            
            my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
            my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
            my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
            my_share_count = my_unstale_count + my_orphan_count + my_doa_count
            my_stale_count = my_orphan_count + my_doa_count
            
            my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
            
            my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
                for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
                if share.hash in my_share_hashes)
            actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
                tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
            share_att_s = my_work / actual_time
            
            return json.dumps(dict(
                my_hash_rates_in_last_hour=dict(
                    nonstale=share_att_s,
                    rewarded=share_att_s/(1 - global_stale_prop),
                    actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
                ),
                my_share_counts_in_last_hour=dict(
                    shares=my_share_count,
                    unstale_shares=my_unstale_count,
                    stale_shares=my_stale_count,
                    orphan_stale_shares=my_orphan_count,
                    doa_stale_shares=my_doa_count,
                ),
                my_stale_proportions_in_last_hour=dict(
                    stale=my_stale_prop,
                    orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                    dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
                ),
            ))
        
        def get_peer_addresses():
            return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
        
        def get_uptime():
            return json.dumps(time.time() - start_time)
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type, *fields):
                self.func, self.mime_type, self.fields = func, mime_type, fields
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                request.setHeader('Access-Control-Allow-Origin', '*')
                return self.func(*(request.args[field][0] for field in self.fields))
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
        web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
        web_root.putChild('graphs', grapher.get_resource())
        def add_point():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return
            nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
            poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
            grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
        task.LoopingCall(add_point).start(100)
        
        def attempt_listen():
            try:
                reactor.listenTCP(args.worker_port, server.Site(web_root))
            except error.CannotListenError, e:
                print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
                reactor.callLater(1, attempt_listen)
            else:
                with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
                    pass
        attempt_listen()
        
        print '    ...success!'
        print
        
        
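        # Refresh work from bitcoind every 15 seconds, or immediately when the
        # P2P factory signals a new block, whichever comes first.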
        @defer.inlineCallbacks
        def work_poller():
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
        
        
        # done!
        print 'Started successfully!'
        print
        
        
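        # Watchdog: re-arm a 30-second SIGALRM once per second; if the reactor
        # ever stalls for 30 seconds, the alarm fires and the handler dumps a
        # stack trace to stderr.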
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool'
                def lineReceived(self, line):
                    print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join('#p2pool')
                    self.watch_id = tracker.verified.added.watch(self._new_share)
                    self.announced_hashes = set()
                def _new_share(self, share):
                    if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes:
                        self.announced_hashes.add(share.header_hash)
                        self.say('#p2pool', '\x02BLOCK FOUND by %s! http://blockexplorer.com/block/%064x' % (bitcoin_data.script2_to_address(share.share_data['new_script'], net.PARENT), share.header_hash))
                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
        @defer.inlineCallbacks
        def status_thread():
            average_period = 600
            first_pseudoshare_time = None
            
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    if time.time() > current_work2.value['last_update'] + 60:
                        print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                    
                    height = tracker.get_height(current_work.value['best_share_hash'])
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(tracker.verified.shares),
                        len(tracker.shares),
                        len(p2p_node.peers),
                        sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    
                    if first_pseudoshare_time is None and recent_shares_ts_work2:
                        first_pseudoshare_time = recent_shares_ts_work2[0][0]
                    while recent_shares_ts_work2 and recent_shares_ts_work2[0][0] < time.time() - average_period:
                        recent_shares_ts_work2.pop(0)
                    my_att_s = sum(work for ts, work, dead in recent_shares_ts_work2)/min(time.time() - first_pseudoshare_time, average_period) if first_pseudoshare_time is not None else 0
                    this_str += '\n Local: %sH/s (%.f min avg) Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        (min(time.time() - first_pseudoshare_time, average_period) if first_pseudoshare_time is not None else 0)/60,
                        math.format_binomial_conf(sum(1 for ts, work, dead in recent_shares_ts_work2 if dead), len(recent_shares_ts_work2), 0.95),
                        '%.1f min' % (2**256 / tracker.shares[current_work.value['best_share_hash']].target / my_att_s / 60,) if my_att_s else '???',
                    )
                    
                    if height > 2:
                        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
                        
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            get_current_txouts().get(my_script, 0)*1e-8, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Average time between blocks: %.2f days' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            2**256 / current_work.value['bits'].target / real_att_s / (60 * 60 * 24),
                        )
                    
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()

def run():
    class FixedArgumentParser(argparse.ArgumentParser):
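        # argparse's stock @file handling treats each line as one argument;
        # this subclass splits lines on whitespace and expands nested @file
        # references recursively, so a config file passed as '@p2pool.conf'
        # (hypothetical name) can hold e.g. '--net bitcoin' on a single line.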
917         def _read_args_from_files(self, arg_strings):
918             # expand arguments referencing files
919             new_arg_strings = []
920             for arg_string in arg_strings:
921                 
922                 # for regular arguments, just add them back into the list
923                 if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
924                     new_arg_strings.append(arg_string)
925                 
926                 # replace arguments referencing files with the file content
927                 else:
928                     try:
929                         args_file = open(arg_string[1:])
930                         try:
931                             arg_strings = []
932                             for arg_line in args_file.read().splitlines():
933                                 for arg in self.convert_arg_line_to_args(arg_line):
934                                     arg_strings.append(arg)
935                             arg_strings = self._read_args_from_files(arg_strings)
936                             new_arg_strings.extend(arg_strings)
937                         finally:
938                             args_file.close()
939                     except IOError:
940                         err = sys.exc_info()[1]
941                         self.error(str(err))
942             
943             # return the modified argument list
944             return new_arg_strings
945         
946         def convert_arg_line_to_args(self, arg_line):
947             return [arg for arg in arg_line.split() if arg.strip()]
948     
949     
950     realnets=dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
951     
952     parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
953     parser.add_argument('--version', action='version', version=p2pool.__version__)
954     parser.add_argument('--net',
955         help='use specified network (default: bitcoin)',
956         action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
957     parser.add_argument('--testnet',
958         help='''use the network's testnet''',
959         action='store_const', const=True, default=False, dest='testnet')
960     parser.add_argument('--debug',
961         help='enable debugging mode',
962         action='store_const', const=True, default=False, dest='debug')
963     parser.add_argument('-a', '--address',
964         help='generate payouts to this address (default: <address requested from bitcoind>)',
965         type=str, action='store', default=None, dest='address')
966     parser.add_argument('--logfile',
967         help='''log to this file (default: data/<NET>/log)''',
968         type=str, action='store', default=None, dest='logfile')
969     parser.add_argument('--merged',
970         help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
971         type=str, action='append', default=[], dest='merged_urls')
972     parser.add_argument('--merged-url',
973         help='DEPRECATED, use --merged',
974         type=str, action='store', default=None, dest='merged_url')
975     parser.add_argument('--merged-userpass',
976         help='DEPRECATED, use --merged',
977         type=str, action='store', default=None, dest='merged_userpass')
978     parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
979         help='donate this percentage of work to author of p2pool (default: 0.5)',
980         type=float, action='store', default=0.5, dest='donation_percentage')
981     parser.add_argument('--irc-announce',
982         help='announce any blocks found on irc://irc.freenode.net/#p2pool',
983         action='store_true', default=False, dest='irc_announce')

    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (default: the p2pool P2P port for the chosen network) in addition to built-in addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    p2pool_group.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')

    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners (default: %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers who mine to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee for mining on your p2pool instance. The amount charged is displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')

    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (providing only one causes the username to default to empty; providing neither causes P2Pool to read both from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
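    # e.g. two trailing positional arguments "myuser mypass" supply the RPC
    # credentials directly instead of reading them from bitcoin.conf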

    args = parser.parse_args()

    if args.debug:
        p2pool.DEBUG = True
    
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    
    datadir_path = os.path.join(os.path.dirname(sys.argv[0]), 'data', net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments is allowed')
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
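    # ([None, None] + userpass)[-2:] left-pads the argument list: zero args
    # yield (None, None), one arg yields (None, password), and two args yield
    # (username, password)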

    if args.bitcoind_rpc_password is None:
        if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
            parser.error('This network has no known configuration file location. Manually enter your RPC password.')
        conf_path = net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you haven't created a configuration file yet, create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
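        # bitcoin.conf is plain key=value text with no section headers, so a
        # dummy '[x]' section is prepended below to make RawConfigParser accept
        # it; any setting not already given on the command line is then filled
        # in from the file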
        with open(conf_path, 'rb') as f:
            cp = ConfigParser.RawConfigParser()
            cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
            for conf_name, var_name, var_type in [
                ('rpcuser', 'bitcoind_rpc_username', str),
                ('rpcpassword', 'bitcoind_rpc_password', str),
                ('rpcport', 'bitcoind_rpc_port', int),
                ('port', 'bitcoind_p2p_port', int),
            ]:
                if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                    setattr(args, var_name, var_type(cp.get('x', conf_name)))

    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    
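    # any port still left unset falls back to the chosen network's defaults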
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None

    def separate_url(url):
        s = urlparse.urlsplit(url)
        if '@' not in s.netloc:
            parser.error('merged url netloc must contain an "@"')
        userpass, new_netloc = s.netloc.rsplit('@', 1)
        return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
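    # e.g. separate_url('http://ncuser:ncpass@127.0.0.1:10332/') returns
    # ('http://127.0.0.1:10332/', 'ncuser:ncpass'), splitting the credentials
    # out of the URL given to --merged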
    merged_urls = map(separate_url, args.merged_urls)
    
    if args.merged_url is not None or args.merged_userpass is not None:
        print '--merged-url and --merged-userpass are deprecated! Use --merged http://USER:PASS@HOST:PORT/ instead!'
        print 'Pausing 10 seconds...'
        time.sleep(10)
        
        if args.merged_url is None or args.merged_userpass is None:
            parser.error('must specify both --merged-url and --merged-userpass')
        
        merged_urls = merged_urls + [(args.merged_url, args.merged_userpass)]

    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
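    # route stdout and stderr through a pipeline that timestamps each line and
    # tees it to both the real stderr and the logfile; stderr output is
    # additionally prefixed with '> ' so it stands out in the log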
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
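    # the logfile is reopened on SIGUSR1 (where the platform provides it) and
    # every five seconds, so external log-rotation tools can rename the file
    # safely out from under the process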
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)

    # start the Twisted reactor; main() (defined above) is scheduled to run as
    # soon as the reactor is up, and the process then serves until shutdown
    reactor.callWhenRunning(main, args, net, datadir_path, merged_urls)
    reactor.run()