# p2pool/main.py
from __future__ import division

import ConfigParser
import StringIO
import argparse
import os
import random
import struct
import sys
import time
import json
import signal
import traceback
import urlparse

from twisted.internet import defer, error, reactor, protocol, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
from . import p2p, networks, graphs
import p2pool, p2pool.data as p2pool_data

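# Fetches a work unit from bitcoind via getmemorypool and normalizes it into a
# plain dict: previous block hash as an int, decoded transactions, a merkle
# branch for the coinbase slot, subsidy, time, bits, and coinbase flags.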
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    work = yield bitcoind.rpc_getmemorypool()
    transactions = [bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']]
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=transactions,
        merkle_branch=bitcoin_data.calculate_merkle_branch([0] + [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in transactions], 0),
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
    ))

@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
        if not good:
            print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        if args.pubkey_hash is None:
            print 'Getting payout address from bitcoind...'
            my_script = yield deferral.retry('Error getting payout address from bitcoind:', 5)(defer.inlineCallbacks(lambda: defer.returnValue(
                bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash((yield bitcoind.rpc_getaccountaddress('p2pool')), net.PARENT)))
            ))()
        else:
            print 'Computing payout script from provided address...'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
        print
        
        ht = bitcoin_p2p.HeightTracker(bitcoind, factory)
        
        my_share_hashes = set()
        my_doa_share_hashes = set()
        
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        recent_blocks = []
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_merged_work = variable.Variable({})
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        requested = expiring_dict.ExpiringDict(300)
        
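        # Polls bitcoind for fresh work and fans it out across the variables
        # above: current_work2 gets the fields that change on every poll (time,
        # transactions), while pre_current_work gets the fields whose change
        # should retrigger share-chain evaluation.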
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                merkle_branch=work['merkle_branch'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # current_work2 is set first so it is already fresh when pre_current_work's watchers fire
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                bits=work['bits'],
                coinbaseflags=work['coinbaseflags'],
            ))
        
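        # Combines the latest bitcoind work with the share tracker's opinion of
        # the best share chain, publishes the result to current_work, and asks
        # peers for any missing parent shares, backing off exponentially
        # (10 * 1.5**count seconds) on repeated requests for the same hash.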
        def set_real_work2():
            best, desired = tracker.think(ht, pre_current_work.value['previous_block'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['mm_chains'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        pre_current_work.changed.watch(lambda _: set_real_work2())
        
        print 'Initializing work...'
        yield set_real_work1()
        print '    ...success!'
        print
        
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        ht.updated.watch(set_real_work2)
        
        
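        # One polling loop per merged-mining chain: repeatedly calls
        # getauxblock on the merged daemon and records the aux work keyed by
        # chain id so it can be committed to in the coinbase.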
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.Proxy(merged_url, (merged_userpass,))
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
                pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # setup p2p logic and join p2pool network
        
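        # The p2p Node subclass wires network events into local state:
        # incoming shares go into the tracker, advertised hashes are fetched if
        # unknown, and getshares requests are answered out of the tracker.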
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
                if not hashes:
                    return # avoid dividing by zero on an empty request
                parents = min(parents, 1000//len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                peer.sendShares(shares)
        
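        # A verified share whose proof-of-work also satisfies the parent
        # network's block target is a complete bitcoin block, so it is passed
        # to bitcoind right away.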
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker))
                else:
                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash)
                print
                recent_blocks.append({ 'ts': share.timestamp, 'hash': '%x' % (share.header_hash) })
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
            try:
                addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
            except:
                print >>sys.stderr, "error reading addrs"
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        p2p_node = Node(
            best_share_hash_func=lambda: current_work.value['best_share_hash'],
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
        )
        p2p_node.start()
        
        def save_addrs():
            open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
        task.LoopingCall(save_addrs).start(60)
        
        # when the best chain changes, send peers any shares at its new head that we haven't shared yet
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
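        # Periodically (re)establishes a UPnP mapping for the p2pool P2P port,
        # sleeping a random exponentially-distributed interval (mean 120s)
        # between attempts so mappings survive router restarts.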
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
            with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
                vip_pass = f.read().strip('\r\n')
        else:
            vip_pass = '%016x' % (random.randrange(2**64),)
            with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
                f.write(vip_pass)
        print '    Worker password:', vip_pass, '(only required for generating graphs)'
        
        # setup worker logic
        
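        # stale_info values used below (inferred from how they are counted
        # elsewhere in this file): 0 = share was on time, 253 = we saw it
        # orphaned, 254 = it was dead on arrival.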
        removed_unstales_var = variable.Variable((0, 0, 0))
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
        
        removed_doa_unstales_var = variable.Variable(0)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
        
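        # Stale shares we produced live in two places: shares still in the
        # tracker (counted via get_delta) and shares already expired out of it
        # (the removed_*_var counters above); get_stale_counts merges both.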
        def get_stale_counts():
            '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
            my_shares = len(my_share_hashes)
            my_doa_shares = len(my_doa_share_hashes)
            delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
            my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
            my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
            orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
            doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
            
            my_shares_not_in_chain = my_shares - my_shares_in_chain
            my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
            
            return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
        
        
        recent_shares_ts_work2 = []
        
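        # Bridges miner connections (getwork-style RPC) to the p2pool share
        # chain: builds block templates containing the share's generate
        # transaction and checks returned headers against the various targets.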
        class WorkerBridge(worker_interface.WorkerBridge):
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                
                self.merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
                self.recent_shares_ts_work = []
            
            def _get_payout_script_from_username(self, user):
                if user is None:
                    return None
                try:
                    pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                except: # XXX blah
                    return None
                return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
            
            def preprocess_request(self, request):
                payout_script = self._get_payout_script_from_username(request.getUser())
                if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                    payout_script = my_script
                return payout_script,
            
            def get_work(self, payout_script):
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
                if current_work.value['mm_chains']:
                    tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
                    mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
                    mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                        merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                        size=size,
                        nonce=0,
                    ))
                    mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
                else:
                    mm_data = ''
                    mm_later = []
                
                share_info, generate_tx = p2pool_data.generate_transaction(
                    tracker=tracker,
                    share_data=dict(
                        previous_share_hash=current_work.value['best_share_hash'],
                        coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
                        nonce=struct.pack('<Q', random.randrange(2**64)),
                        new_script=payout_script,
                        subsidy=current_work2.value['subsidy'],
                        donation=math.perfect_round(65535*args.donation_percentage/100),
                        stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                            253 if orphans > orphans_recorded_in_chain else
                            254 if doas > doas_recorded_in_chain else
                            0
                        )(*get_stale_counts()),
                    ),
                    block_target=current_work.value['bits'].target,
                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                    net=net,
                )
                
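                # Pseudoshare target: start at difficulty 1 (2**256//2**32 - 1),
                # then, once 50 timestamped pseudoshares are available, raise
                # the difficulty so this miner submits roughly one every 5
                # seconds, clamped so it is never harder than the actual share
                # target or any merged-mining target.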
                target = 2**256//2**32 - 1
                if len(self.recent_shares_ts_work) == 50:
                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                    target = min(target, 2**256//(hash_rate * 5))
                target = max(target, share_info['bits'].target)
                for aux_work in current_work.value['mm_chains'].itervalues():
                    target = max(target, aux_work['target'])
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
                merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(generate_tx)), 0, current_work2.value['merkle_branch'])
                self.merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time(), mm_later, target, current_work2.value['merkle_branch']
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                return bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
            
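            # Handles a header returned by a miner: look up the transactions by
            # merkle root, then check the header's proof-of-work against, in
            # turn, the block target (submit block), each merged chain's target
            # (submit auxpow), the share target (create and broadcast a share),
            # and finally the pseudoshare target (local statistics).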
            def got_response(self, header, request):
                # match up with transactions
                if header['merkle_root'] not in self.merkle_root_to_transactions:
                    print >>sys.stderr, '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                share_info, transactions, getwork_time, mm_later, target, merkle_branch = self.merkle_root_to_transactions[header['merkle_root']]
                
                pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
                on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                
                try:
                    if pow_hash <= header['bits'].target or p2pool.DEBUG:
                        if factory.conn.value is not None:
                            factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                        else:
                            print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                        if pow_hash <= header['bits'].target:
                            print
                            print 'GOT BLOCK FROM MINER! Passing to bitcoind! bitcoin: %x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),)
                            print
                            recent_blocks.append({ 'ts': time.time(), 'hash': '%x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),) })
                except:
                    log.err(None, 'Error while processing potential block:')
                
                for aux_work, index, hashes in mm_later:
                    try:
                        if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                            df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                                bitcoin_data.aux_pow_type.pack(dict(
                                    merkle_tx=dict(
                                        tx=transactions[0],
                                        block_hash=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),
                                        merkle_branch=merkle_branch,
                                        index=0,
                                    ),
                                    merkle_branch=bitcoin_data.calculate_merkle_branch(hashes, index),
                                    index=index,
                                    parent_block_header=header,
                                )).encode('hex'),
                            )
                            @df.addCallback
                            def _(result):
                                if result != (pow_hash <= aux_work['target']):
                                    print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                                else:
                                    print 'Merged block submittal result: %s' % (result,)
                            @df.addErrback
                            def _(err):
                                log.err(err, 'Error submitting merged block:')
                    except:
                        log.err(None, 'Error while processing merged mining POW:')
                
                if pow_hash <= share_info['bits'].target:
                    share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                    print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                        request.getUser(),
                        p2pool_data.format_hash(share.hash),
                        p2pool_data.format_hash(share.previous_hash),
                        time.time() - getwork_time,
                        ' DEAD ON ARRIVAL' if not on_time else '',
                    )
                    my_share_hashes.add(share.hash)
                    if not on_time:
                        my_doa_share_hashes.add(share.hash)
                    
                    tracker.add(share)
                    if not p2pool.DEBUG:
                        tracker.verified.add(share)
                    set_real_work2()
                    
                    try:
                        if pow_hash <= header['bits'].target or p2pool.DEBUG:
                            for peer in p2p_node.peers.itervalues():
                                peer.sendShares([share])
                            shared_share_hashes.add(share.hash)
                    except:
                        log.err(None, 'Error forwarding block solution:')
                
                if pow_hash <= target:
                    reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
                    if request.getPassword() == vip_pass:
                        reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
                    self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                    while len(self.recent_shares_ts_work) > 50:
                        self.recent_shares_ts_work.pop(0)
                    recent_shares_ts_work2.append((time.time(), bitcoin_data.target_to_average_attempts(target), not on_time))
                
                if pow_hash > target:
                    print 'Worker submitted share with hash > target:'
                    print '    Hash:   %56x' % (pow_hash,)
                    print '    Target: %56x' % (target,)
                
                return on_time
        
        web_root = resource.Resource()
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
        
        def get_rate():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return json.dumps(None)
            return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
            return json.dumps(res)
        
        def get_current_txouts():
            share = tracker.shares[current_work.value['best_share_hash']]
            share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
            return dict((out['script'], out['value']) for out in gentx['tx_outs'])
        
        def get_current_scaled_txouts(scale, trunc=0):
            txouts = get_current_txouts()
            total = sum(txouts.itervalues())
            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
            if trunc > 0:
                total_random = 0
                random_set = set()
                for s in sorted(results, key=results.__getitem__):
                    if results[s] >= trunc:
                        break
                    total_random += results[s]
                    random_set.add(s)
                if total_random:
                    winner = math.weighted_choice((script, results[script]) for script in random_set)
                    for script in random_set:
                        del results[script]
                    results[winner] = total_random
            if sum(results.itervalues()) < int(scale):
                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
            return results
        
        def get_current_payouts():
            return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
        
        def get_patron_sendmany(this):
            try:
                if '/' in this:
                    this, trunc = this.split('/', 1)
                else:
                    trunc = '0.01'
                return json.dumps(dict(
                    (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
                    for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
                    if bitcoin_data.script2_to_address(script, net.PARENT) is not None
                ))
            except:
                return json.dumps(None)
        
        def get_global_stats():
            # averaged over last hour
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return json.dumps(None)
            
            nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            return json.dumps(dict(
                pool_nonstale_hash_rate=nonstale_hash_rate,
                pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
                pool_stale_prop=stale_prop,
            ))
        
        def get_local_stats():
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return json.dumps(None)
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            
            my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
            my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
            my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
            my_share_count = my_unstale_count + my_orphan_count + my_doa_count
            my_stale_count = my_orphan_count + my_doa_count
            
            my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
            
            my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
                for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
                if share.hash in my_share_hashes)
            actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
                tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
            share_att_s = my_work / actual_time
            
            return json.dumps(dict(
                my_hash_rates_in_last_hour=dict(
                    nonstale=share_att_s,
                    rewarded=share_att_s/(1 - global_stale_prop),
                    actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
                ),
                my_share_counts_in_last_hour=dict(
                    shares=my_share_count,
                    unstale_shares=my_unstale_count,
                    stale_shares=my_stale_count,
                    orphan_stale_shares=my_orphan_count,
                    doa_stale_shares=my_doa_count,
                ),
                my_stale_proportions_in_last_hour=dict(
                    stale=my_stale_prop,
                    orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                    dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
                ),
            ))
        
        def get_peer_addresses():
            return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
        
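        # Small helper resource: wraps a zero-or-more-argument callable as a
        # GET endpoint, pulling the named query fields out of request.args.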
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type, *fields):
                resource.Resource.__init__(self)
                self.func, self.mime_type, self.fields = func, mime_type, fields
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                request.setHeader('Access-Control-Allow-Origin', '*')
                return self.func(*(request.args[field][0] for field in self.fields))
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
        web_root.putChild('graphs', grapher.get_resource())
        def add_point():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return
            nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
            poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
            grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
        task.LoopingCall(add_point).start(100)
        
        def attempt_listen():
            try:
                reactor.listenTCP(args.worker_port, server.Site(web_root))
            except error.CannotListenError, e:
                print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
                reactor.callLater(1, attempt_listen)
            else:
                with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
                    pass
        attempt_listen()
        
        print '    ...success!'
        print
        
        
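        # Refreshes work from bitcoind whenever the p2p factory signals a new
        # block, or every 15 seconds at the latest.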
        @defer.inlineCallbacks
        def work_poller():
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
        
        
        # done!
        print 'Started successfully!'
        print
        
        
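        # Watchdog: rearm a 30-second alarm from the reactor every second; if
        # the reactor thread wedges for 30s, the SIGALRM handler fires and
        # dumps the current stack to stderr.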
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool'
                def lineReceived(self, line):
                    print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join('#p2pool')
                    self.watch_id = tracker.verified.added.watch(self._new_share)
                    self.announced_hashes = set()
                def _new_share(self, share):
                    if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes:
                        self.announced_hashes.add(share.header_hash)
                        self.say('#p2pool', '\x02BLOCK FOUND by %s! http://blockexplorer.com/block/%064x' % (bitcoin_data.script2_to_address(share.new_script, net.PARENT), share.header_hash))
                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
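        # Prints a multi-line status summary every few seconds (repeated at
        # most every 15s when nothing changed): share-chain height, peers,
        # local and pool hashrates, stale rates, and the expected payout.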
        @defer.inlineCallbacks
        def status_thread():
            average_period = 600
            first_pseudoshare_time = None
            
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    if time.time() > current_work2.value['last_update'] + 60:
                        print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                    
                    height = tracker.get_height(current_work.value['best_share_hash'])
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(tracker.verified.shares),
                        len(tracker.shares),
                        len(p2p_node.peers),
                        sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    
                    if first_pseudoshare_time is None and recent_shares_ts_work2:
                        first_pseudoshare_time = recent_shares_ts_work2[0][0]
                    while recent_shares_ts_work2 and recent_shares_ts_work2[0][0] < time.time() - average_period:
                        recent_shares_ts_work2.pop(0)
                    my_att_s = sum(work for ts, work, dead in recent_shares_ts_work2)/min(time.time() - first_pseudoshare_time, average_period) if first_pseudoshare_time is not None else 0
                    this_str += '\n Local: %sH/s (%.f min avg) Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        (min(time.time() - first_pseudoshare_time, average_period) if first_pseudoshare_time is not None else 0)/60,
                        math.format_binomial_conf(sum(1 for ts, work, dead in recent_shares_ts_work2 if dead), len(recent_shares_ts_work2), 0.95),
                        '%.1f min' % (2**256 / tracker.shares[current_work.value['best_share_hash']].target / my_att_s / 60,) if my_att_s else '???',
                    )
                    
                    if height > 2:
                        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
                        
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            get_current_txouts().get(my_script, 0)*1e-8, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Average time between blocks: %.2f days' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            2**256 / current_work.value['bits'].target / real_att_s / (60 * 60 * 24),
                        )
                    
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()

def run():
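    # argparse's stock fromfile support treats each line of an @file as a
    # single argument; this subclass splits lines on whitespace and expands
    # nested @file references, so config files can hold "--key value" pairs.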
    class FixedArgumentParser(argparse.ArgumentParser):
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            return [arg for arg in arg_line.split() if arg.strip()]
    
    
    realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
    
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from bitcoind>)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged',
        help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--merged-url',
        help='DEPRECATED, use --merged',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='DEPRECATED, use --merged',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work to author of p2pool (default: 0.5)',
        type=float, action='store', default=0.5, dest='donation_percentage')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners (default: %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (if only one is provided, the username defaults to empty; if neither is, P2Pool reads both from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    
    datadir_path = os.path.join(os.path.dirname(sys.argv[0]), 'data', net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments is allowed')
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
    
    if args.bitcoind_rpc_password is None:
        if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
            parser.error('This network has no configuration file function. Manually enter your RPC password.')
        conf_path = net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
        with open(conf_path, 'rb') as f:
            cp = ConfigParser.RawConfigParser()
            cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
            for conf_name, var_name, var_type in [
                ('rpcuser', 'bitcoind_rpc_username', str),
                ('rpcpassword', 'bitcoind_rpc_password', str),
                ('rpcport', 'bitcoind_rpc_port', int),
                ('port', 'bitcoind_p2p_port', int),
            ]:
                if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                    setattr(args, var_name, var_type(cp.get('x', conf_name)))
    
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
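    # Splits the userinfo out of a merged-mining URL, e.g.
    # 'http://user:pass@127.0.0.1:10332/' -> ('http://127.0.0.1:10332/', 'user:pass').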
    def separate_url(url):
        s = urlparse.urlsplit(url)
        if '@' not in s.netloc:
            parser.error('merged url netloc must contain an "@"')
        userpass, new_netloc = s.netloc.rsplit('@', 1)
        return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
    merged_urls = map(separate_url, args.merged_urls)
    
    if args.merged_url is not None or args.merged_userpass is not None:
        print '--merged-url and --merged-userpass are deprecated! Use --merged http://USER:PASS@HOST:PORT/ instead!'
        print 'Pausing 10 seconds...'
        time.sleep(10)
        
        if args.merged_url is None or args.merged_userpass is None:
            parser.error('must specify both --merged-url and --merged-userpass')
        
        merged_urls = merged_urls + [(args.merged_url, args.merged_userpass)]
    
    
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
    
    reactor.callWhenRunning(main, args, net, datadir_path, merged_urls)
    reactor.run()