1a074415c4c6ddd23693267f008608275bebb9ef
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2 # coding=utf-8
3
4 from __future__ import division
5
6 import ConfigParser
7 import StringIO
8 import argparse
9 import os
10 import random
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16 import traceback
17
18 from twisted.internet import defer, reactor, protocol, task
19 from twisted.web import server, resource
20 from twisted.python import log
21 from nattraverso import portmapper, ipdiscover
22
23 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
24 from bitcoin import worker_interface
25 from util import expiring_dict, jsonrpc, variable, deferral, math, logging
26 from . import p2p, networks, graphs
27 import p2pool, p2pool.data as p2pool_data
28
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    # Fetch a block template from bitcoind via getmemorypool and normalize it
    # into p2pool's internal work dict (ints / unpacked objects instead of
    # hex strings). Retries up to 3 times on failure via deferral.retry.
    work = yield bitcoind.rpc_getmemorypool()
    
    # 'bits' may arrive as a hex string (little-endian on the wire) or as a
    # plain integer, depending on the bitcoind version.
    raw_bits = work['bits']
    if isinstance(raw_bits, (str, unicode)):
        bits = bitcoin_data.FloatingIntegerType().unpack(raw_bits.decode('hex')[::-1])
    else:
        bits = bitcoin_data.FloatingInteger(raw_bits)
    
    # Coinbase flag bytes: prefer the explicit 'coinbaseflags' field, fall back
    # to concatenating the 'coinbaseaux' values, else empty.
    if 'coinbaseflags' in work:
        coinbaseflags = work['coinbaseflags'].decode('hex')
    elif 'coinbaseaux' in work:
        coinbaseflags = ''.join(aux.decode('hex') for aux in work['coinbaseaux'].itervalues())
    else:
        coinbaseflags = ''
    
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=[bitcoin_data.tx_type.unpack(raw_tx.decode('hex')) for raw_tx in work['transactions']],
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bits,
        coinbaseflags=coinbaseflags,
    ))
42
43 @defer.inlineCallbacks
44 def main(args, net, datadir_path):
45     try:
46         print 'p2pool (version %s)' % (p2pool.__version__,)
47         print
48         try:
49             from . import draw
50         except ImportError:
51             draw = None
52             print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
53             print
54         
55         # connect to bitcoind over JSON-RPC and do initial getmemorypool
56         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
57         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
58         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
59         good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
60         if not good:
61             print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
62             return
63         temp_work = yield getwork(bitcoind)
64         print '    ...success!'
65         print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
66         print
67         
68         # connect to bitcoind over bitcoin-p2p
69         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
70         factory = bitcoin_p2p.ClientFactory(net.PARENT)
71         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
72         yield factory.getProtocol() # waits until handshake is successful
73         print '    ...success!'
74         print
75         
76         if args.pubkey_hash is None:
77             print 'Getting payout address from bitcoind...'
78             my_script = yield deferral.retry('Error getting payout address from bitcoind:', 5)(defer.inlineCallbacks(lambda: defer.returnValue(
79                 bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash((yield bitcoind.rpc_getaccountaddress('p2pool')), net.PARENT)))
80             ))()
81         else:
82             print 'Computing payout script from provided address....'
83             my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
84         print '    ...success!'
85         print '    Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
86         print
87         
88         ht = bitcoin_p2p.HeightTracker(bitcoind, factory)
89         
90         my_share_hashes = set()
91         my_doa_share_hashes = set()
92         
93         tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
94         shared_share_hashes = set()
95         ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
96         known_verified = set()
97         recent_blocks = []
98         print "Loading shares..."
99         for i, (mode, contents) in enumerate(ss.get_shares()):
100             if mode == 'share':
101                 if contents.hash in tracker.shares:
102                     continue
103                 shared_share_hashes.add(contents.hash)
104                 contents.time_seen = 0
105                 tracker.add(contents)
106                 if len(tracker.shares) % 1000 == 0 and tracker.shares:
107                     print "    %i" % (len(tracker.shares),)
108             elif mode == 'verified_hash':
109                 known_verified.add(contents)
110             else:
111                 raise AssertionError()
112         print "    ...inserting %i verified shares..." % (len(known_verified),)
113         for h in known_verified:
114             if h not in tracker.shares:
115                 ss.forget_verified_share(h)
116                 continue
117             tracker.verified.add(tracker.shares[h])
118         print "    ...done loading %i shares!" % (len(tracker.shares),)
119         print
120         tracker.removed.watch(lambda share: ss.forget_share(share.hash))
121         tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
122         tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
123         
124         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
125         
126         pre_current_work = variable.Variable(None)
127         pre_merged_work = variable.Variable(None)
128         # information affecting work that should trigger a long-polling update
129         current_work = variable.Variable(None)
130         # information affecting work that should not trigger a long-polling update
131         current_work2 = variable.Variable(None)
132         
133         requested = expiring_dict.ExpiringDict(300)
134         
135         @defer.inlineCallbacks
136         def set_real_work1():
137             work = yield getwork(bitcoind)
138             current_work2.set(dict(
139                 time=work['time'],
140                 transactions=work['transactions'],
141                 subsidy=work['subsidy'],
142                 clock_offset=time.time() - work['time'],
143                 last_update=time.time(),
144             )) # second set first because everything hooks on the first
145             pre_current_work.set(dict(
146                 version=work['version'],
147                 previous_block=work['previous_block_hash'],
148                 bits=work['bits'],
149                 coinbaseflags=work['coinbaseflags'],
150             ))
151         
        def set_real_work2():
            # Combine the latest bitcoind work (pre_current_work) with share-chain
            # state: pick the best share to mine on and publish the merged dict to
            # current_work (triggering long-poll updates), then ask peers for any
            # desired-but-missing parent shares.
            best, desired = tracker.think(ht, pre_current_work.value['previous_block'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['aux_work'] = pre_merged_work.value
            current_work.set(t)
            
            # t is reused below as the current timestamp for request bookkeeping
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                # exponential backoff: skip if we asked within the last
                # 10 * 1.5**count seconds (count = prior request attempts)
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                # candidate peers: any connected peer known to hold a head that
                # descends from this missing share
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    # first attempt: prefer the peer that announced the share
                    peer = peer2
                else:
                    # retries: usually (80%) pick a random knowledgeable peer,
                    # otherwise fall back to the announcer
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    # stop points: our heads plus a point up to 10 shares below
                    # each head, capped at 100 entries to bound message size
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
187         pre_current_work.changed.watch(lambda _: set_real_work2())
188         
189         print 'Initializing work...'
190         yield set_real_work1()
191         print '    ...success!'
192         print
193         
194         pre_merged_work.changed.watch(lambda _: set_real_work2())
195         ht.updated.watch(set_real_work2)
196         
197         merged_proxy = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,)) if args.merged_url else None
198         
199         @defer.inlineCallbacks
200         def set_merged_work():
201             while True:
202                 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
203                 pre_merged_work.set(dict(
204                     hash=int(auxblock['hash'], 16),
205                     target=bitcoin_data.IntType(256).unpack(auxblock['target'].decode('hex')),
206                     chain_id=auxblock['chainid'],
207                 ))
208                 yield deferral.sleep(1)
209         if merged_proxy is not None:
210             set_merged_work()
211         
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            # Log whenever fresh merged-mining work arrives.
            print "Got new merged mining work! Difficulty: %f" % (bitcoin_data.target_to_difficulty(new_merged_work['target']),)
215         
216         # setup p2p logic and join p2pool network
217         
        class Node(p2p.Node):
            # p2pool network node: wires incoming p2p share traffic into the
            # local tracker and serves share requests from peers.
            def handle_shares(self, shares, peer):
                # Ingest shares received from the network (or submitted locally
                # with peer=None), deduplicating against the tracker and re-running
                # chain selection if anything new arrived.
                if len(shares) > 5:
                    print 'Processing %i shares...' % (len(shares),)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    # remember that this peer knows about the head of this batch
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                # A peer announced share hashes; request the ones we don't have,
                # using the same exponential-backoff bookkeeping as set_real_work2.
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
                # Serve a peer's request: for each requested hash, walk up to
                # `parents` ancestors, stopping early at any hash in `stops`.
                parents = min(parents, 1000//len(hashes)) # cap total response size
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                peer.sendShares(shares)
273         
        @tracker.verified.added.watch
        def _(share):
            # If a newly-verified share also meets the bitcoin block target, it
            # IS a block: submit it to bitcoind and record it for the web UI.
            if share.pow_hash <= share.header['bits'].target:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker))
                else:
                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash)
                print
                recent_blocks.append({ 'ts': share.timestamp, 'hash': '%x' % (share.header_hash) })
285         
286         print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
287         
288         @defer.inlineCallbacks
289         def parse(x):
290             if ':' in x:
291                 ip, port = x.split(':')
292                 defer.returnValue(((yield reactor.resolve(ip)), int(port)))
293             else:
294                 defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
295         
296         addrs = {}
297         if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
298             try:
299                 addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
300             except:
301                 print >>sys.stderr, "error reading addrs"
302         for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
303             try:
304                 addr = yield addr_df
305                 if addr not in addrs:
306                     addrs[addr] = (0, time.time(), time.time())
307             except:
308                 log.err()
309         
310         connect_addrs = set()
311         for addr_df in map(parse, args.p2pool_nodes):
312             try:
313                 connect_addrs.add((yield addr_df))
314             except:
315                 log.err()
316         
317         p2p_node = Node(
318             best_share_hash_func=lambda: current_work.value['best_share_hash'],
319             port=args.p2pool_port,
320             net=net,
321             addr_store=addrs,
322             connect_addrs=connect_addrs,
323         )
324         p2p_node.start()
325         
326         def save_addrs():
327             open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
328         task.LoopingCall(save_addrs).start(60)
329         
330         # send share when the chain changes to their chain
331         def work_changed(new_work):
332             #print 'Work changed:', new_work
333             shares = []
334             for share in tracker.get_chain(new_work['best_share_hash'], tracker.get_height(new_work['best_share_hash'])):
335                 if share.hash in shared_share_hashes:
336                     break
337                 shared_share_hashes.add(share.hash)
338                 shares.append(share)
339             
340             for peer in p2p_node.peers.itervalues():
341                 peer.sendShares([share for share in shares if share.peer is not peer])
342         
343         current_work.changed.watch(work_changed)
344         
345         def save_shares():
346             for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
347                 ss.add_share(share)
348                 if share.hash in tracker.verified.shares:
349                     ss.add_verified_hash(share.hash)
350         task.LoopingCall(save_shares).start(60)
351         
352         print '    ...success!'
353         print
354         
355         @defer.inlineCallbacks
356         def upnp_thread():
357             while True:
358                 try:
359                     is_lan, lan_ip = yield ipdiscover.get_local_ip()
360                     if is_lan:
361                         pm = yield portmapper.get_port_mapper()
362                         yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
363                 except defer.TimeoutError:
364                     pass
365                 except:
366                     if p2pool.DEBUG:
367                         log.err(None, "UPnP error:")
368                 yield deferral.sleep(random.expovariate(1/120))
369         
370         if args.upnp:
371             upnp_thread()
372         
373         # start listening for workers with a JSON-RPC server
374         
375         print 'Listening for workers on port %i...' % (args.worker_port,)
376         
377         if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
378             with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
379                 vip_pass = f.read().strip('\r\n')
380         else:
381             vip_pass = '%016x' % (random.randrange(2**64),)
382             with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
383                 f.write(vip_pass)
384         print '    Worker password:', vip_pass, '(only required for generating graphs)'
385         
386         # setup worker logic
387         
        # Counts of our own shares pruned from the tracker while still ancestors
        # of the best chain: (total, orphan-announced, dead-announced). Keeps
        # get_stale_counts() accurate after old shares are discarded.
        removed_unstales_var = variable.Variable((0, 0, 0))
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
398         
399         removed_doa_unstales_var = variable.Variable(0)
400         @tracker.verified.removed.watch
401         def _(share):
402             if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
403                 removed_doa_unstales.set(removed_doa_unstales.value + 1)
404         
405         def get_stale_counts():
406             '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
407             my_shares = len(my_share_hashes)
408             my_doa_shares = len(my_doa_share_hashes)
409             delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
410             my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
411             my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
412             orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
413             doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
414             
415             my_shares_not_in_chain = my_shares - my_shares_in_chain
416             my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
417             
418             return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
419         
        class WorkerBridge(worker_interface.WorkerBridge):
            # Bridges the getwork-style miner interface to the p2pool share
            # chain: builds block attempts for miners and turns their solutions
            # into shares / bitcoin blocks / merged-mining submissions.
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                
                # merkle_root -> (share_info, transactions, getwork_time, aux_work, target);
                # entries expire after 300s, so stale miner responses are dropped
                self.merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
                # (timestamp, attempts) of recent accepted proofs, used to estimate
                # the local hashrate for pseudoshare difficulty targeting
                self.recent_shares_ts_work = []
            
            def _get_payout_script_from_username(self, user):
                # Treat the worker username as a coin address if it parses as one;
                # returns None (caller then falls back to the node's script).
                if user is None:
                    return None
                try:
                    pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                except: # XXX blah
                    return None
                return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
            
            def preprocess_request(self, request):
                # Choose the payout script for this request; worker_fee percent of
                # requests are randomly redirected to the node operator's script.
                payout_script = self._get_payout_script_from_username(request.getUser())
                if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                    payout_script = my_script
                return payout_script,
            
            def get_work(self, payout_script):
                # Build a getwork BlockAttempt for a miner, generating the share's
                # coinbase/generate transaction and caching everything needed to
                # interpret the miner's eventual response by merkle root.
                # Refuse to hand out work when we clearly can't use the result:
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
                share_info, generate_tx = p2pool_data.generate_transaction(
                    tracker=tracker,
                    share_data=dict(
                        previous_share_hash=current_work.value['best_share_hash'],
                        # merged-mining commitment: magic '\xfa\xbemm' + aux chain
                        # hash + merkle size/nonce, prepended to the coinbase flags
                        # and truncated to 100 bytes
                        coinbase=(('' if current_work.value['aux_work'] is None else
                            '\xfa\xbemm' + bitcoin_data.IntType(256, 'big').pack(current_work.value['aux_work']['hash']) + struct.pack('<ii', 1, 0)) + current_work.value['coinbaseflags'])[:100],
                        nonce=struct.pack('<Q', random.randrange(2**64)),
                        new_script=payout_script,
                        subsidy=current_work2.value['subsidy'],
                        donation=math.perfect_round(65535*args.donation_percentage/100),
                        # advertise whether we currently observe unrecorded
                        # orphans (253) or DOAs (254) of our own
                        stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                            253 if orphans > orphans_recorded_in_chain else
                            254 if doas > doas_recorded_in_chain else
                            0
                        )(*get_stale_counts()),
                    ),
                    block_target=current_work.value['bits'].target,
                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                    net=net,
                )
                
                # pseudoshare target: start at difficulty 1, tighten so this
                # miner produces roughly one result per 5 seconds once we have
                # 50 samples of its hashrate, but never easier than the real
                # share target or the aux-work target
                target = 2**256//2**32 - 1
                if len(self.recent_shares_ts_work) == 50:
                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                    target = min(target, 2**256//(hash_rate * 5))
                target = max(target, share_info['bits'].target)
                if current_work.value['aux_work']:
                    target = max(target, current_work.value['aux_work']['target'])
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
                merkle_root = bitcoin_data.merkle_hash(map(bitcoin_data.tx_type.hash256, transactions))
                # cache everything got_response() needs, keyed by merkle root
                self.merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time(), current_work.value['aux_work'], target
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - current_work2.value['subsidy']//200)*1e-8, net.PARENT.SYMBOL,
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                return bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
            
            def got_response(self, header, request):
                # Handle a solved header from a miner. Depending on how good the
                # proof-of-work is, it may be: a bitcoin block (submit to
                # bitcoind), a merged-mining block (submit to the merged daemon),
                # a p2pool share (add and broadcast), and/or a pseudoshare (local
                # hashrate accounting). Returns whether the work was on time.
                # match up with transactions
                if header['merkle_root'] not in self.merkle_root_to_transactions:
                    print >>sys.stderr, '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                share_info, transactions, getwork_time, aux_work, target = self.merkle_root_to_transactions[header['merkle_root']]
                
                pow_hash = net.PARENT.POW_FUNC(header)
                # on time == the chain tip hasn't moved since we issued this work
                on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                
                try:
                    # bitcoin block? (in DEBUG mode, submit regardless to exercise the path)
                    if pow_hash <= header['bits'].target or p2pool.DEBUG:
                        if factory.conn.value is not None:
                            factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                        else:
                            print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                        if pow_hash <= header['bits'].target:
                            print
                            print 'GOT BLOCK FROM MINER! Passing to bitcoind! bitcoin: %x' % (bitcoin_data.block_header_type.hash256(header),)
                            print
                            recent_blocks.append({ 'ts': time.time(), 'hash': '%x' % (bitcoin_data.block_header_type.hash256(header),) })
                except:
                    log.err(None, 'Error while processing potential block:')
                
                try:
                    # merged-mining block? build the aux proof-of-work and submit
                    # it to the merged daemon asynchronously
                    if aux_work is not None and (pow_hash <= aux_work['target'] or p2pool.DEBUG):
                        # sanity-check that the coinbase really commits to this aux chain hash
                        assert bitcoin_data.IntType(256, 'big').pack(aux_work['hash']).encode('hex') == transactions[0]['tx_ins'][0]['script'][4:4+32].encode('hex')
                        df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(merged_proxy.rpc_getauxblock)(
                            bitcoin_data.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                            bitcoin_data.aux_pow_type.pack(dict(
                                merkle_tx=dict(
                                    tx=transactions[0],
                                    block_hash=bitcoin_data.block_header_type.hash256(header),
                                    merkle_branch=bitcoin_data.calculate_merkle_branch(map(bitcoin_data.tx_type.hash256, transactions), 0),
                                    index=0,
                                ),
                                merkle_branch=[],
                                index=0,
                                parent_block_header=header,
                            )).encode('hex'),
                        )
                        @df.addCallback
                        def _(result):
                            if result != (pow_hash <= aux_work['target']):
                                print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (result,)
                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')
                
                # p2pool share? record it as ours and hand it to the p2p node
                if pow_hash <= share_info['bits'].target:
                    share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
                    print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                        request.getUser(),
                        p2pool_data.format_hash(share.hash),
                        p2pool_data.format_hash(share.previous_hash),
                        time.time() - getwork_time,
                        ' DEAD ON ARRIVAL' if not on_time else '',
                    )
                    my_share_hashes.add(share.hash)
                    if not on_time:
                        my_doa_share_hashes.add(share.hash)
                    p2p_node.handle_shares([share], None)
                    try:
                        # a full block solution is urgent: push it to all peers immediately
                        if pow_hash <= header['bits'].target:
                            for peer in p2p_node.peers.itervalues():
                                peer.sendShares([share])
                            shared_share_hashes.add(share.hash)
                    except:
                        log.err(None, 'Error forwarding block solution:')
                
                # pseudoshare accounting for local hashrate graphs/estimation
                if pow_hash <= target:
                    # NOTE(review): 'grapher' is not defined in this part of the
                    # file -- presumably created elsewhere in main(); confirm.
                    reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
                    if request.getPassword() == vip_pass:
                        reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
                    self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                    while len(self.recent_shares_ts_work) > 50:
                        self.recent_shares_ts_work.pop(0)
                
                if pow_hash > target:
                    print 'Worker submitted share with hash > target:'
                    print '    Hash:   %56x' % (pow_hash,)
                    print '    Target: %56x' % (target,)
                
                return on_time
589         
        # Root of the worker-facing HTTP tree: the getwork RPC endpoint is
        # attached at / by WorkerInterface; stats endpoints are added below.
        web_root = resource.Resource()
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
592         
593         def get_rate():
594             if tracker.get_height(current_work.value['best_share_hash']) < 720:
595                 return json.dumps(None)
596             return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
597                 / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
598         
599         def get_users():
600             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
601             weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
602             res = {}
603             for script in sorted(weights, key=lambda s: weights[s]):
604                 res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
605             return json.dumps(res)
606         
        def get_current_txouts():
            # Map of payout script -> value (satoshis) for the payout that
            # would be generated right now.
            # Trick: ask a fresh WorkerBridge for work using a random tag as
            # the "user", then find that tagged output among the outputs of
            # the first transaction of the generated work and redistribute its
            # value proportionally over the remaining (real) outputs.
            wb = WorkerBridge()
            tmp_tag = str(random.randrange(2**64))
            # [1][0] picks the first transaction of the work's transaction
            # list - presumably the generation tx; TODO confirm against
            # WorkerBridge.get_work/merkle_root_to_transactions
            outputs = wb.merkle_root_to_transactions[wb.get_work(tmp_tag).merkle_root][1][0]['tx_outs']
            total = sum(out['value'] for out in outputs)
            total_without_tag = sum(out['value'] for out in outputs if out['script'] != tmp_tag)
            total_diff = total - total_without_tag  # value carried by the tagged output
            # drop the tagged output and zero-value outputs, spreading
            # total_diff across the rest in proportion to their value
            return dict((out['script'], out['value'] + math.perfect_round(out['value']*total_diff/total)) for out in outputs if out['script'] != tmp_tag and out['value'])
615         
        def get_current_scaled_txouts(scale, trunc=0):
            # Scale the current payouts so they total `scale` satoshis.
            # If trunc > 0, the smallest payouts are merged and handed to one
            # weighted-random winner among them, avoiding dust-sized outputs.
            txouts = get_current_txouts()
            total = sum(txouts.itervalues())
            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
            if trunc > 0:
                total_random = 0
                random_set = set()
                # walk payouts from smallest to largest, absorbing them until
                # both the running total and the current payout reach trunc
                for s in sorted(results, key=results.__getitem__):
                    total_random += results[s]
                    random_set.add(s)
                    if total_random >= trunc and results[s] >= trunc:
                        break
                # one absorbed script, chosen with probability proportional to
                # its payout, receives the whole absorbed amount
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
            # integer division above can leave a shortfall; give the remainder
            # to a weighted-random recipient so the total is exactly scale
            if sum(results.itervalues()) < int(scale):
                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
            return results
635         
636         def get_current_payouts():
637             return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
638         
639         def get_patron_sendmany(this):
640             try:
641                 if '/' in this:
642                     this, trunc = this.split('/', 1)
643                 else:
644                     trunc = '0.01'
645                 return json.dumps(dict(
646                     (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
647                     for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
648                     if bitcoin_data.script2_to_address(script, net.PARENT) is not None
649                 ))
650             except:
651                 return json.dumps(None)
652         
653         def get_global_stats():
654             # averaged over last hour
655             lookbehind = 3600//net.SHARE_PERIOD
656             if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
657                 return None
658             
659             nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
660             stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
661             return json.dumps(dict(
662                 pool_nonstale_hash_rate=nonstale_hash_rate,
663                 pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
664                 pool_stale_prop=stale_prop,
665             ))
666         
667         def get_local_stats():
668             lookbehind = 3600//net.SHARE_PERIOD
669             if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
670                 return None
671             
672             global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
673             
674             my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
675             my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
676             my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
677             my_share_count = my_unstale_count + my_orphan_count + my_doa_count
678             my_stale_count = my_orphan_count + my_doa_count
679             
680             my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
681             
682             my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
683                 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
684                 if share.hash in my_share_hashes)
685             actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
686                 tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
687             share_att_s = my_work / actual_time
688             
689             return json.dumps(dict(
690                 my_hash_rates_in_last_hour=dict(
691                     nonstale=share_att_s,
692                     rewarded=share_att_s/(1 - global_stale_prop),
693                     actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
694                 ),
695                 my_share_counts_in_last_hour=dict(
696                     shares=my_share_count,
697                     unstale_shares=my_unstale_count,
698                     stale_shares=my_stale_count,
699                     orphan_stale_shares=my_orphan_count,
700                     doa_stale_shares=my_doa_count,
701                 ),
702                 my_stale_proportions_in_last_hour=dict(
703                     stale=my_stale_prop,
704                     orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
705                     dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
706                 ),
707             ))
708         
709         def get_peer_addresses():
710             return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
711         
712         class WebInterface(resource.Resource):
713             def __init__(self, func, mime_type, *fields):
714                 self.func, self.mime_type, self.fields = func, mime_type, fields
715             
716             def render_GET(self, request):
717                 request.setHeader('Content-Type', self.mime_type)
718                 request.setHeader('Access-Control-Allow-Origin', '*')
719                 return self.func(*(request.args[field][0] for field in self.fields))
720         
        # Wire up the read-only stats endpoints under the worker HTTP port.
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
        # chain visualization is only available when Pygame/PIL are installed
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
733         
        # RRD-backed graphs served under /graphs.
        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
        web_root.putChild('graphs', grapher.get_resource())
        def add_point():
            # Record total pool rate and its stale portion in the graphs;
            # skipped until 720 shares of history exist.
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return
            nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
            poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
            grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
        task.LoopingCall(add_point).start(100)  # sample every 100 seconds
        
        # start serving miners and the stats endpoints
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
748         
749         
        @defer.inlineCallbacks
        def work_poller():
            # Keep work fresh: re-fetch from bitcoind when the P2P factory
            # signals a new block, and at least once every 15 seconds.
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                # wait for whichever fires first: new-block signal or timeout
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
760         
761         
        # done!
        print 'Started successfully!'
        print
        
        
        if hasattr(signal, 'SIGALRM'):
            # Watchdog: the looping call re-arms a 30-second alarm every
            # second while the reactor is responsive; if the reactor stalls,
            # SIGALRM fires and the handler dumps the stuck stack to stderr.
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            # don't let the alarm interrupt blocking system calls
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
773         
774         if args.irc_announce:
775             from twisted.words.protocols import irc
776             class IRCClient(irc.IRCClient):
777                 nickname = 'p2pool'
778                 def lineReceived(self, line):
779                     print repr(line)
780                     irc.IRCClient.lineReceived(self, line)
781                 def signedOn(self):
782                     irc.IRCClient.signedOn(self)
783                     self.factory.resetDelay()
784                     self.join('#p2pool')
785                     self.watch_id = current_work.changed.watch(self._work_changed)
786                     self.announced_hashes = set()
787                 def _work_changed(self, new_work):
788                     share = tracker.shares[new_work['best_share_hash']]
789                     if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes:
790                         self.say('#p2pool', '\x033,4BLOCK FOUND! http://blockexplorer.com/block/' + bitcoin_data.IntType(256, 'big').pack(share.header_hash).encode('hex'))
791                 def connectionLost(self, reason):
792                     current_work.changed.unwatch(self.watch_id)
793             class IRCClientFactory(protocol.ReconnectingClientFactory):
794                 protocol = IRCClient
795             reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
796         
        @defer.inlineCallbacks
        def status_thread():
            # Every 3 seconds: warn if bitcoind has been silent for a minute,
            # then build a one-shot status summary and print it when it
            # changes (or at most every 15 seconds).
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    if time.time() > current_work2.value['last_update'] + 60:
                        print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                    if current_work.value['best_share_hash'] is not None:
                        height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                        if height > 2:
                            # pool rate over the last <=720 shares, then scaled
                            # up by the stale proportion to the real rate
                            att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                            (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
                            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
                            real_att_s = att_s / (1 - stale_prop)
                            # this node's slice of the pool rate, by weight
                            my_att_s = real_att_s*weights.get(my_script, 0)/total_weight
                            this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i (%i incoming)' % (
                                math.format(int(real_att_s)),
                                height,
                                len(tracker.verified.shares),
                                len(tracker.shares),
                                weights.get(my_script, 0)/total_weight*100,
                                math.format(int(my_att_s)),
                                shares,
                                stale_orphan_shares,
                                stale_doa_shares,
                                len(p2p_node.peers),
                                sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                            ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                            this_str += '\nAverage time between blocks: %.2f days' % (
                                2**256 / current_work.value['bits'].target / real_att_s / (60 * 60 * 24),
                            )
                            this_str += '\nPool stales: %i%%' % (int(100*stale_prop+.5),)
                            # 95% binomial confidence interval on own stales
                            stale_center, stale_radius = math.binomial_conf_center_radius(stale_orphan_shares + stale_doa_shares, shares, 0.95)
                            this_str += u' Own: %i±%i%%' % (int(100*stale_center+.5), int(100*stale_radius+.5))
                            this_str += u' Own efficiency: %i±%i%%' % (int(100*(1 - stale_center)/(1 - stale_prop)+.5), int(100*stale_radius/(1 - stale_prop)+.5))
                            if this_str != last_str or time.time() > last_time + 15:
                                print this_str
                                last_str = this_str
                                last_time = time.time()
                except:
                    log.err()
        status_thread()
842     except:
843         log.err(None, 'Fatal error:')
844
def run():
    # Entry point: parse the command line (and any @argfile references),
    # resolve defaults from the selected network and bitcoin.conf, set up
    # logging, then hand off to main() under the Twisted reactor.
    class FixedArgumentParser(argparse.ArgumentParser):
        # ArgumentParser whose @file expansion allows several arguments per
        # line and recursive @file references (stock argparse reads one
        # argument per line and doesn't recurse).
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            # recurse so an @file can itself contain @file refs
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            # split each line on whitespace into individual arguments
            return [arg for arg in arg_line.split() if arg.strip()]
    
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from bitcoind>)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining (example: http://127.0.0.1:10332/)',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='use this user and password when requesting merged mining work (example: ncuser:ncpass)',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work to author of p2pool (default: 0.5)',
        type=float, action='store', default=0.5, dest='donation_percentage')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (n.NAME, n.PARENT.RPC_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (n.NAME, n.PARENT.P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    # network objects are registered as e.g. 'bitcoin' / 'bitcoin_testnet'
    net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    # per-network data directory next to the executable
    datadir_path = os.path.join(os.path.dirname(sys.argv[0]), 'data', net.NAME)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments are allowed')
    # zero args -> (None, None); one arg -> (None, password); two -> both
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
    
    if args.bitcoind_rpc_password is None:
        # fall back to reading credentials/ports from bitcoin.conf
        if not hasattr(net, 'CONF_FILE_FUNC'):
            parser.error('This network has no configuration file function. Manually enter your RPC password.')
        conf_path = net.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=true\r\n'''
                '''rpcpassword=%x # (randomly generated for your convenience)''' % (conf_path, random.randrange(2**128)))
        with open(conf_path, 'rb') as f:
            cp = ConfigParser.RawConfigParser()
            # bitcoin.conf has no section headers, so prepend a dummy one
            cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
            for conf_name, var_name, var_type in [
                ('rpcuser', 'bitcoind_rpc_username', str),
                ('rpcpassword', 'bitcoind_rpc_password', str),
                ('rpcport', 'bitcoind_rpc_port', int),
                ('port', 'bitcoind_p2p_port', int),
            ]:
                # conf values only fill in options not given on the command line
                if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                    setattr(args, var_name, var_type(cp.get('x', conf_name)))
    
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    
    # remaining port defaults come from the selected network definition
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        # main() will request a payout address from bitcoind instead
        args.pubkey_hash = None
    
    # merged mining options must be given together or not at all
    if (args.merged_url is None) ^ (args.merged_userpass is None):
        parser.error('must specify --merged-url and --merged-userpass')
    
    
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
    # tee stdout/stderr through a timestamping pipe into the logfile
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
    if hasattr(signal, "SIGUSR1"):
        # SIGUSR1 reopens the logfile, for external log rotation
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
    
    reactor.callWhenRunning(main, args, net, datadir_path)
    reactor.run()