# p2pool/main.py
from __future__ import division

import ConfigParser
import StringIO
import argparse
import os
import random
import struct
import sys
import time
import json
import signal
import traceback

from twisted.internet import defer, reactor, protocol, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
from . import p2p, networks, graphs
import p2pool, p2pool.data as p2pool_data

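# getwork: fetch a block template from bitcoind via getmemorypool and
# normalize it into a plain dict; 'bits' may arrive as a hex string or as an
# integer, so both forms are handled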
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    work = yield bitcoind.rpc_getmemorypool()
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
    ))

@defer.inlineCallbacks
def main(args, net, datadir_path):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
        if not good:
            print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        if args.pubkey_hash is None:
            print 'Getting payout address from bitcoind...'
            my_script = yield deferral.retry('Error getting payout address from bitcoind:', 5)(defer.inlineCallbacks(lambda: defer.returnValue(
                bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash((yield bitcoind.rpc_getaccountaddress('p2pool')), net.PARENT)))
            ))()
        else:
            print 'Computing payout script from provided address...'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
        print
        
        ht = bitcoin_p2p.HeightTracker(bitcoind, factory)
        
        my_share_hashes = set()
        my_doa_share_hashes = set()
        
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        recent_blocks = []
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_merged_work = variable.Variable(None)
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        requested = expiring_dict.ExpiringDict(300)
        
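        # Work flows through two stages: set_real_work1 polls bitcoind and
        # fills current_work2 and pre_current_work; set_real_work2 (triggered
        # by watching pre_current_work) merges in share-chain state and
        # merged-mining work to produce current_work for long-polling workers.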
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # current_work2 is set first because everything hooks on pre_current_work and expects current_work2 to be fresh
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                bits=work['bits'],
                coinbaseflags=work['coinbaseflags'],
            ))
        
        def set_real_work2():
            best, desired = tracker.think(ht, pre_current_work.value['previous_block'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['aux_work'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received while tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
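                # exponential backoff: skip this share if a request for it
                # went out less than 10 * 1.5**count seconds ago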
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        pre_current_work.changed.watch(lambda _: set_real_work2())
        
        print 'Initializing work...'
        yield set_real_work1()
        print '    ...success!'
        print
        
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        ht.updated.watch(set_real_work2)
        
        merged_proxy = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,)) if args.merged_url else None
        
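        # merged mining: poll the auxiliary chain's getauxblock roughly once a
        # second and publish the newest aux work via pre_merged_work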
        @defer.inlineCallbacks
        def set_merged_work():
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
                pre_merged_work.set(dict(
                    hash=int(auxblock['hash'], 16),
                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    chain_id=auxblock['chainid'],
                ))
                yield deferral.sleep(1)
        if merged_proxy is not None:
            set_merged_work()
        
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            print "Got new merged mining work! Difficulty: %f" % (bitcoin_data.target_to_difficulty(new_merged_work['target']),)
        
        # setup p2p logic and join p2pool network
        
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
                parents = min(parents, 1000//len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                peer.sendShares(shares)
        
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker))
                else:
                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash)
                print
                recent_blocks.append({ 'ts': share.timestamp, 'hash': '%x' % (share.header_hash) })
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
            try:
                addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
            except:
                print >>sys.stderr, "error reading addrs"
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        p2p_node = Node(
            best_share_hash_func=lambda: current_work.value['best_share_hash'],
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
        )
        p2p_node.start()
        
        def save_addrs():
            open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
        task.LoopingCall(save_addrs).start(60)
        
        # broadcast any of our shares that haven't been shared yet whenever the best chain changes
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(new_work['best_share_hash'], tracker.get_height(new_work['best_share_hash'])):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
            with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
                vip_pass = f.read().strip('\r\n')
        else:
            vip_pass = '%016x' % (random.randrange(2**64),)
            with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
                f.write(vip_pass)
        print '    Worker password:', vip_pass, '(only required for generating graphs)'
        
        # setup worker logic
        
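        # stale_info values used in share_data: 0 = not stale, 253 = orphaned,
        # 254 = dead on arrival. removed_unstales_var counts (total, orphans,
        # doas) among our own shares that were removed from the tracker while
        # still being ancestors of the current best share.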
        removed_unstales_var = variable.Variable((0, 0, 0))
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
        
        removed_doa_unstales_var = variable.Variable(0)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)

        def get_stale_counts():
            '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
            my_shares = len(my_share_hashes)
            my_doa_shares = len(my_doa_share_hashes)
            delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
            my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
            my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
            orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
            doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
            
            my_shares_not_in_chain = my_shares - my_shares_in_chain
            my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
            
            return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
        
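        # WorkerBridge mediates between miners speaking the getwork protocol
        # and the share chain: get_work hands out block attempts, and
        # got_response checks each returned header against the block target,
        # the merged-mining target, the share target, and the pseudo-share target.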
        class WorkerBridge(worker_interface.WorkerBridge):
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                
                self.merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
                self.recent_shares_ts_work = []
            
            def _get_payout_script_from_username(self, user):
                if user is None:
                    return None
                try:
                    pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                except: # not a valid address; caller falls back to my_script
                    return None
                return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
            
            def preprocess_request(self, request):
                payout_script = self._get_payout_script_from_username(request.getUser())
                if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                    payout_script = my_script
                return payout_script,
            
            def get_work(self, payout_script):
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
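                # build the share and its generate (coinbase) transaction; when
                # merged mining, the coinbase embeds the magic bytes
                # '\xfa\xbemm' followed by the aux chain's block hash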
                share_info, generate_tx = p2pool_data.generate_transaction(
                    tracker=tracker,
                    share_data=dict(
                        previous_share_hash=current_work.value['best_share_hash'],
                        coinbase=(('' if current_work.value['aux_work'] is None else
                            '\xfa\xbemm' + pack.IntType(256, 'big').pack(current_work.value['aux_work']['hash']) + struct.pack('<ii', 1, 0)) + current_work.value['coinbaseflags'])[:100],
                        nonce=struct.pack('<Q', random.randrange(2**64)),
                        new_script=payout_script,
                        subsidy=current_work2.value['subsidy'],
                        donation=math.perfect_round(65535*args.donation_percentage/100),
                        stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                            253 if orphans > orphans_recorded_in_chain else
                            254 if doas > doas_recorded_in_chain else
                            0
                        )(*get_stale_counts()),
                    ),
                    block_target=current_work.value['bits'].target,
                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                    net=net,
                )
                
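                # pseudo-share target: start near difficulty 1 and, once 50
                # recent pseudo-shares have been seen, tighten it so this miner
                # solves roughly one every 5 seconds; never make it harder than
                # the real share target or the merged-mining target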
                target = 2**256//2**32 - 1
                if len(self.recent_shares_ts_work) == 50:
                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                    target = min(target, 2**256//(hash_rate * 5))
                target = max(target, share_info['bits'].target)
                if current_work.value['aux_work']:
                    target = max(target, current_work.value['aux_work']['target'])
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
                merkle_root = bitcoin_data.merkle_hash([bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in transactions])
                self.merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time(), current_work.value['aux_work'], target
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - current_work2.value['subsidy']//200)*1e-8, net.PARENT.SYMBOL,
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                return bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
            
            def got_response(self, header, request):
                # match up with transactions
                if header['merkle_root'] not in self.merkle_root_to_transactions:
                    print >>sys.stderr, '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                share_info, transactions, getwork_time, aux_work, target = self.merkle_root_to_transactions[header['merkle_root']]
                
                pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
                on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                
                try:
                    if pow_hash <= header['bits'].target or p2pool.DEBUG:
                        if factory.conn.value is not None:
                            factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                        else:
                            print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                        if pow_hash <= header['bits'].target:
                            print
                            print 'GOT BLOCK FROM MINER! Passing to bitcoind! bitcoin: %x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),)
                            print
                            recent_blocks.append({ 'ts': time.time(), 'hash': '%x' % (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),) })
                except:
                    log.err(None, 'Error while processing potential block:')
                
                try:
                    if aux_work is not None and (pow_hash <= aux_work['target'] or p2pool.DEBUG):
                        assert pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex') == transactions[0]['tx_ins'][0]['script'][4:4+32].encode('hex')
                        df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(merged_proxy.rpc_getauxblock)(
                            pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                            bitcoin_data.aux_pow_type.pack(dict(
                                merkle_tx=dict(
                                    tx=transactions[0],
                                    block_hash=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)),
                                    merkle_branch=bitcoin_data.calculate_merkle_branch([bitcoin_data.hash256(bitcoin_data.tx_type.pack(x)) for x in transactions], 0),
                                    index=0,
                                ),
                                merkle_branch=[],
                                index=0,
                                parent_block_header=header,
                            )).encode('hex'),
                        )
                        @df.addCallback
                        def _(result):
                            if result != (pow_hash <= aux_work['target']):
                                print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (result,)
                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')
                
                if pow_hash <= share_info['bits'].target:
                    share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
                    print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                        request.getUser(),
                        p2pool_data.format_hash(share.hash),
                        p2pool_data.format_hash(share.previous_hash),
                        time.time() - getwork_time,
                        ' DEAD ON ARRIVAL' if not on_time else '',
                    )
                    my_share_hashes.add(share.hash)
                    if not on_time:
                        my_doa_share_hashes.add(share.hash)
                    p2p_node.handle_shares([share], None)
                    try:
                        if pow_hash <= header['bits'].target:
                            for peer in p2p_node.peers.itervalues():
                                peer.sendShares([share])
                            shared_share_hashes.add(share.hash)
                    except:
                        log.err(None, 'Error forwarding block solution:')
                
                if pow_hash <= target:
                    reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
                    if request.getPassword() == vip_pass:
                        reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
                    self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                    while len(self.recent_shares_ts_work) > 50:
                        self.recent_shares_ts_work.pop(0)
                
                if pow_hash > target:
                    print 'Worker submitted share with hash > target:'
                    print '    Hash:   %56x' % (pow_hash,)
                    print '    Target: %56x' % (target,)
                
                return on_time
        
        web_root = resource.Resource()
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
        
        def get_rate():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return json.dumps(None)
            return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
            return json.dumps(res)
        
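        # Payout estimation below works by generating a throwaway piece of
        # work whose payout script is a random tag, then dropping the tag's
        # own output and redistributing its value across the rest
        # proportionally.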
        def get_current_txouts():
            wb = WorkerBridge()
            tmp_tag = str(random.randrange(2**64))
            outputs = wb.merkle_root_to_transactions[wb.get_work(tmp_tag).merkle_root][1][0]['tx_outs']
            total = sum(out['value'] for out in outputs)
            total_without_tag = sum(out['value'] for out in outputs if out['script'] != tmp_tag)
            total_diff = total - total_without_tag
            return dict((out['script'], out['value'] + math.perfect_round(out['value']*total_diff/total)) for out in outputs if out['script'] != tmp_tag and out['value'])
        
        def get_current_scaled_txouts(scale, trunc=0):
            txouts = get_current_txouts()
            total = sum(txouts.itervalues())
            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
            if trunc > 0:
                total_random = 0
                random_set = set()
                for s in sorted(results, key=results.__getitem__):
                    total_random += results[s]
                    random_set.add(s)
                    if total_random >= trunc and results[s] >= trunc:
                        break
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
            if sum(results.itervalues()) < int(scale):
                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
            return results
        
        def get_current_payouts():
            return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
        
        def get_patron_sendmany(this):
            try:
                if '/' in this:
                    this, trunc = this.split('/', 1)
                else:
                    trunc = '0.01'
                return json.dumps(dict(
                    (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
                    for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
                    if bitcoin_data.script2_to_address(script, net.PARENT) is not None
                ))
            except:
                return json.dumps(None)
        
        def get_global_stats():
            # averaged over last hour
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            return json.dumps(dict(
                pool_nonstale_hash_rate=nonstale_hash_rate,
                pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
                pool_stale_prop=stale_prop,
            ))
        
        def get_local_stats():
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return None
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            
            my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
            my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
            my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
            my_share_count = my_unstale_count + my_orphan_count + my_doa_count
            my_stale_count = my_orphan_count + my_doa_count
            
            my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
            
            my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
                for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
                if share.hash in my_share_hashes)
            actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
                tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
            share_att_s = my_work / actual_time
            
            return json.dumps(dict(
                my_hash_rates_in_last_hour=dict(
                    nonstale=share_att_s,
                    rewarded=share_att_s/(1 - global_stale_prop),
                    actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
                ),
                my_share_counts_in_last_hour=dict(
                    shares=my_share_count,
                    unstale_shares=my_unstale_count,
                    stale_shares=my_stale_count,
                    orphan_stale_shares=my_orphan_count,
                    doa_stale_shares=my_doa_count,
                ),
                my_stale_proportions_in_last_hour=dict(
                    stale=my_stale_prop,
                    orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                    dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
                ),
            ))
        
        def get_peer_addresses():
            return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type, *fields):
                self.func, self.mime_type, self.fields = func, mime_type, fields
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                request.setHeader('Access-Control-Allow-Origin', '*')
                return self.func(*(request.args[field][0] for field in self.fields))
        
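        # Stats endpoints served over the worker HTTP port; for example
        # (assuming the default bitcoin worker port):
        #   curl http://127.0.0.1:9332/rate
        #   curl http://127.0.0.1:9332/local_stats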
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
        web_root.putChild('graphs', grapher.get_resource())
        def add_point():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return
            nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
            poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
            grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
        task.LoopingCall(add_point).start(100)
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        
        @defer.inlineCallbacks
        def work_poller():
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
        
        
        # done!
        print 'Started successfully!'
        print
        
        
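        # watchdog: re-arm a 30-second alarm every second; if the reactor
        # stalls for more than 30 seconds, SIGALRM fires and the handler
        # dumps a stack trace to stderr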
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool'
                def lineReceived(self, line):
                    print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join('#p2pool')
                    self.watch_id = tracker.verified.added.watch(self._new_share)
                    self.announced_hashes = set()
                def _new_share(self, share):
                    if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes:
                        self.announced_hashes.add(share.header_hash)
                        self.say('#p2pool', '\x032,1BLOCK FOUND by %s! http://blockexplorer.com/block/%064x' % (bitcoin_data.script2_to_human(share.new_script, net.PARENT), share.header_hash))
                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    if time.time() > current_work2.value['last_update'] + 60:
                        print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                    if current_work.value['best_share_hash'] is not None:
                        height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                        if height > 2:
                            att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                            (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
                            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
                            real_att_s = att_s / (1 - stale_prop)
                            my_att_s = real_att_s*weights.get(my_script, 0)/total_weight
                            this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i (%i incoming)' % (
                                math.format(int(real_att_s)),
                                height,
                                len(tracker.verified.shares),
                                len(tracker.shares),
                                weights.get(my_script, 0)/total_weight*100,
                                math.format(int(my_att_s)),
                                shares,
                                stale_orphan_shares,
                                stale_doa_shares,
                                len(p2p_node.peers),
                                sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                            ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                            this_str += '\nAverage time between blocks: %.2f days' % (
                                2**256 / current_work.value['bits'].target / real_att_s / (60 * 60 * 24),
                            )
                            this_str += '\nPool stales: %i%% Own: %s Own efficiency: %s' % (
                                int(100*stale_prop+.5),
                                math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                                math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            )
                            if this_str != last_str or time.time() > last_time + 15:
                                print this_str
                                last_str = this_str
                                last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        log.err(None, 'Fatal error:')

def run():
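    # argparse's fromfile support normally treats each line of an @file as
    # one argument; this subclass splits lines on whitespace and expands
    # nested @file references recursively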
    class FixedArgumentParser(argparse.ArgumentParser):
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            return [arg for arg in arg_line.split() if arg.strip()]
    
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from bitcoind>)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining (example: http://127.0.0.1:10332/)',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='use this user and password when requesting merged mining work (example: ncuser:ncpass)',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work to author of p2pool (default: 0.5)',
        type=float, action='store', default=0.5, dest='donation_percentage')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers who mine to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee for mining on your p2pool instance. Displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (n.NAME, n.PARENT.RPC_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (n.NAME, n.PARENT.P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (if only one is provided, the username defaults to empty; if neither is, P2Pool reads both from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    datadir_path = os.path.join(os.path.dirname(sys.argv[0]), 'data', net.NAME)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments is allowed')
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
    
    if args.bitcoind_rpc_password is None:
        if not hasattr(net, 'CONF_FILE_FUNC'):
            parser.error('This network has no configuration file function. Manually enter your RPC password.')
        conf_path = net.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=true\r\n'''
                '''rpcpassword=%x # (randomly generated for your convenience)''' % (conf_path, random.randrange(2**128)))
        with open(conf_path, 'rb') as f:
            cp = ConfigParser.RawConfigParser()
            cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
            for conf_name, var_name, var_type in [
                ('rpcuser', 'bitcoind_rpc_username', str),
                ('rpcpassword', 'bitcoind_rpc_password', str),
                ('rpcport', 'bitcoind_rpc_port', int),
                ('port', 'bitcoind_p2p_port', int),
            ]:
                if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                    setattr(args, var_name, var_type(cp.get('x', conf_name)))
    
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    if (args.merged_url is None) ^ (args.merged_userpass is None):
        parser.error('--merged-url and --merged-userpass must be specified together')
    
    
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
    
    reactor.callWhenRunning(main, args, net, datadir_path)
    reactor.run()
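
# Example invocation (a sketch; assumes the repository's run_p2pool.py
# wrapper, which calls run(), and uses a placeholder payout address):
#   python run_p2pool.py --net bitcoin -a 1YourPayoutAddressHere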