added new share type with cutoff date and incremented protocol version
[p2pool.git] / p2pool / main.py
from __future__ import division

import ConfigParser
import StringIO
import argparse
import base64
import os
import random
import sys
import time
import signal
import traceback
import urlparse

from twisted.internet import defer, reactor, protocol, task
from twisted.web import server
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface, height_tracker
from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
from . import p2p, networks, web
import p2pool, p2pool.data as p2pool_data

@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    try:
        work = yield bitcoind.rpc_getmemorypool()
    except jsonrpc.Error, e:
        if e.code == -32601: # Method not found
            print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
            raise deferral.RetrySilentlyException()
        raise
    packed_transactions = [x.decode('hex') for x in work['transactions']]
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
        merkle_link=bitcoin_data.calculate_merkle_link([0] + map(bitcoin_data.hash256, packed_transactions), 0), # using 0 is a bit of a hack, but will always work when index=0
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
    ))
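
# Note (illustrative, not from a live node): getwork() above expects the
# pre-BIP22 getmemorypool reply format, roughly
#     {'version': 1, 'previousblockhash': '00000000...', 'transactions': ['0100...'],
#      'coinbasevalue': 5000000000, 'time': 1330000000, 'bits': '1a0575ef'}
# plus optional 'coinbaseflags' or 'coinbaseaux' entries; the values shown are made up.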

@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
        @deferral.retry('Error while checking Bitcoin connection:', 1)
        @defer.inlineCallbacks
        def check():
            if not (yield net.PARENT.RPC_CHECK)(bitcoind):
                print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
                raise deferral.RetrySilentlyException()
            v = (yield bitcoind.rpc_getinfo())['version']
            temp_work = yield getwork(bitcoind)
            if not net.VERSION_CHECK((v//10000, v//100%100, v%100), temp_work):
                print >>sys.stderr, '    Bitcoin version too old! BIP16 support required! Upgrade to 0.6.0rc4 or greater!'
                raise deferral.RetrySilentlyException()
            defer.returnValue(temp_work)
        temp_work = yield check()
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        print 'Determining payout address...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            
            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            
            with open(address_path, 'wb') as f:
                f.write(address)
            
            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        else:
            my_pubkey_hash = args.pubkey_hash
        print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
        print
        
        my_share_hashes = set()
        my_doa_share_hashes = set()
        
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        recent_blocks = []
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_merged_work = variable.Variable({})
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        requested = expiring_dict.ExpiringDict(300)
        
        print 'Initializing work...'
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                merkle_link=work['merkle_link'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # current_work2 is set first because watchers of pre_current_work read its value
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                bits=work['bits'],
                coinbaseflags=work['coinbaseflags'],
            ))
        yield set_real_work1()
        
        get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, lambda: pre_current_work.value['previous_block'], net)
        
        def set_real_work2():
            best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['mm_chains'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received while tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
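                # re-request backoff: a share is only requested again once the
                # previous request is at least 10 * 1.5**count seconds old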
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        pre_current_work.changed.watch(lambda _: set_real_work2())
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        set_real_work2()
        print '    ...success!'
        print
        
        
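        # merged mining: poll each aux chain's getauxblock about once a second and
        # publish its work keyed by chain id so that get_work can build the auxpow tree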
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.Proxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
                pre_merged_work.set(dict(pre_merged_work.value, **{auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target=pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # setup p2p logic and join p2pool network
        
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
                parents = min(parents, 1000//len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                return shares
        
        @deferral.retry('Error submitting block: (will retry)', 10, 10)
        @defer.inlineCallbacks
        def submit_block(block, ignore_failure):
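            # NOTE: this uses the pre-BIP22 interface, where passing the serialized
            # block to getmemorypool submits it; success_expected independently checks
            # whether the header meets its target, so any disagreement with the
            # node's verdict is reported below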
            success = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex'))
            success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target
            if (not success and success_expected and not ignore_failure) or (success and not success_expected):
                print >>sys.stderr, 'Block submittal result: %s Expected: %s' % (success, success_expected)
        
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                submit_block(share.as_block(tracker), ignore_failure=True)
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                print
                recent_blocks.append(dict(ts=share.timestamp, hash='%064x' % (share.header_hash,)))
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
            try:
                addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
            except:
                print >>sys.stderr, "error reading addrs"
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        p2p_node = Node(
            best_share_hash_func=lambda: current_work.value['best_share_hash'],
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
            max_incoming_conns=args.p2pool_conns,
        )
        p2p_node.start()
        
        task.LoopingCall(lambda: open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())).start(60)
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(new_work['best_share_hash'], min(5, tracker.get_height(new_work['best_share_hash']))):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        if args.upnp:
            @defer.inlineCallbacks
            def upnp_thread():
                while True:
                    try:
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                        if is_lan:
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                    except defer.TimeoutError:
                        pass
                    except:
                        if p2pool.DEBUG:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1/120))
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        
        if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
            with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
                vip_pass = f.read().strip('\r\n')
        else:
            vip_pass = '%016x' % (random.randrange(2**64),)
            with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
                f.write(vip_pass)
        print '    Worker password:', vip_pass, '(only required for generating graphs)'
        
        # setup worker logic
        
        removed_unstales_var = variable.Variable((0, 0, 0))
        removed_doa_unstales_var = variable.Variable(0)
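        # stale_info codes used below: 0 = on-time, 253 = orphan, 254 = dead on arrival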
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
            if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
        
        def get_stale_counts():
            '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
            my_shares = len(my_share_hashes)
            my_doa_shares = len(my_doa_share_hashes)
            delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
            my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
            my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
            orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
            doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
            
            my_shares_not_in_chain = my_shares - my_shares_in_chain
            my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
            
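            # local shares missing from the chain that were not dead on arrival are counted as orphans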
            return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
        
        
        pseudoshare_received = variable.Event()
        local_rate_monitor = math.RateMonitor(10*60)
        
        class WorkerBridge(worker_interface.WorkerBridge):
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                self.recent_shares_ts_work = []
            
            def preprocess_request(self, request):
                user = request.getUser() if request.getUser() is not None else ''
                
                desired_pseudoshare_target = None
                if '+' in user:
                    user, desired_pseudoshare_difficulty_str = user.rsplit('+', 1)
                    try:
                        desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(desired_pseudoshare_difficulty_str))
                    except:
                        pass
                
                desired_share_target = 2**256 - 1
                if '/' in user:
                    user, min_diff_str = user.rsplit('/', 1)
                    try:
                        desired_share_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
                    except:
                        pass
                
                if random.uniform(0, 100) < args.worker_fee:
                    pubkey_hash = my_pubkey_hash
                else:
                    try:
                        pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                    except: # XXX blah
                        pubkey_hash = my_pubkey_hash
                
                return pubkey_hash, desired_share_target, desired_pseudoshare_target
            
            def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
                if current_work.value['mm_chains']:
                    tree, size = bitcoin_data.make_auxpow_tree(current_work.value['mm_chains'])
                    mm_hashes = [current_work.value['mm_chains'].get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
                    mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
                        merkle_root=bitcoin_data.merkle_hash(mm_hashes),
                        size=size,
                        nonce=0,
                    ))
                    mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in current_work.value['mm_chains'].iteritems()]
                else:
                    mm_data = ''
                    mm_later = []
                
                def predict_timestamp():
                    desired_timestamp = int(time.time() - current_work2.value['clock_offset'])
                    previous_share = tracker.shares[current_work.value['best_share_hash']] if current_work.value['best_share_hash'] is not None else None
                    return math.clip(desired_timestamp, (
                        (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
                        (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
                    )) if previous_share is not None else desired_timestamp
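                # cutoff-date switch for the new share type: once the predicted share
                # timestamp reaches net.SWITCH_TIME, work is generated as NewShare;
                # before that, the legacy Share format is kept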
                new = predict_timestamp() >= net.SWITCH_TIME
                if new:
                    share_info, generate_tx = p2pool_data.NewShare.generate_transaction(
                        tracker=tracker,
                        share_data=dict(
                            previous_share_hash=current_work.value['best_share_hash'],
                            coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
                            nonce=random.randrange(2**32),
                            pubkey_hash=pubkey_hash,
                            subsidy=current_work2.value['subsidy'],
                            donation=math.perfect_round(65535*args.donation_percentage/100),
                            stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                                253 if orphans > orphans_recorded_in_chain else
                                254 if doas > doas_recorded_in_chain else
                                0
                            )(*get_stale_counts()),
                            desired_version=1,
                        ),
                        block_target=current_work.value['bits'].target,
                        desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                        desired_target=desired_share_target,
                        ref_merkle_link=dict(branch=[], index=0),
                        net=net,
                    )
                else:
                    share_info, generate_tx = p2pool_data.Share.generate_transaction(
                        tracker=tracker,
                        share_data=dict(
                            previous_share_hash=current_work.value['best_share_hash'],
                            coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
                            nonce=random.randrange(2**32),
                            pubkey_hash=pubkey_hash,
                            subsidy=current_work2.value['subsidy'],
                            donation=math.perfect_round(65535*args.donation_percentage/100),
                            stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                                253 if orphans > orphans_recorded_in_chain else
                                254 if doas > doas_recorded_in_chain else
                                0
                            )(*get_stale_counts()),
                        ),
                        block_target=current_work.value['bits'].target,
                        desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                        desired_target=desired_share_target,
                        net=net,
                    )
                
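                # pseudoshare target: honor a miner-requested difficulty if given,
                # otherwise auto-tune from recent local hashrate; clamped below so it
                # is never harder than the share target or any aux chain's target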
                target = net.PARENT.SANE_MAX_TARGET
                if desired_pseudoshare_target is None:
                    if len(self.recent_shares_ts_work) == 50:
                        hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                        target = min(target, 2**256//hash_rate)
                else:
                    target = min(target, desired_pseudoshare_target)
                target = max(target, share_info['bits'].target)
                for aux_work in current_work.value['mm_chains'].itervalues():
                    target = max(target, aux_work['target'])
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
                packed_generate_tx = bitcoin_data.tx_type.pack(generate_tx)
                merkle_root = bitcoin_data.check_merkle_link(bitcoin_data.hash256(packed_generate_tx), current_work2.value['merkle_link'])
                
                getwork_time = time.time()
                merkle_link = current_work2.value['merkle_link']
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                ba = bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
                
                received_header_hashes = set()
                
                def got_response(header, request):
                    assert header['merkle_root'] == merkle_root
                    
                    header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
                    pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
                    on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                    
                    try:
                        if pow_hash <= header['bits'].target or p2pool.DEBUG:
                            submit_block(dict(header=header, txs=transactions), ignore_failure=False)
                            if pow_hash <= header['bits'].target:
                                print
                                print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                                print
                                recent_blocks.append(dict(ts=time.time(), hash='%064x' % (header_hash,)))
                    except:
                        log.err(None, 'Error while processing potential block:')
                    
                    for aux_work, index, hashes in mm_later:
                        try:
                            if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                                df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                    pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                                    bitcoin_data.aux_pow_type.pack(dict(
                                        merkle_tx=dict(
                                            tx=transactions[0],
                                            block_hash=header_hash,
                                            merkle_link=merkle_link,
                                        ),
                                        merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                        parent_block_header=header,
                                    )).encode('hex'),
                                )
                                @df.addCallback
                                def _(result):
                                    if result != (pow_hash <= aux_work['target']):
                                        print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                                    else:
                                        print 'Merged block submittal result: %s' % (result,)
                                @df.addErrback
                                def _(err):
                                    log.err(err, 'Error submitting merged block:')
                        except:
                            log.err(None, 'Error while processing merged mining POW:')
                    
                    if pow_hash <= share_info['bits'].target:
                        min_header = dict(header)
                        del min_header['merkle_root']
                        hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.Share.gentx_before_refhash)
                        if new:
                            share = p2pool_data.NewShare(net, None, dict(
                                min_header=min_header, share_info=share_info, hash_link=hash_link,
                                ref_merkle_link=dict(branch=[], index=0),
                            ), merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                        else:
                            share = p2pool_data.Share(net, None, min_header, share_info, hash_link=hash_link, merkle_link=merkle_link, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
                        
                        print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                            request.getUser(),
                            p2pool_data.format_hash(share.hash),
                            p2pool_data.format_hash(share.previous_hash),
                            time.time() - getwork_time,
                            ' DEAD ON ARRIVAL' if not on_time else '',
                        )
                        my_share_hashes.add(share.hash)
                        if not on_time:
                            my_doa_share_hashes.add(share.hash)
                        
                        tracker.add(share)
                        if not p2pool.DEBUG:
                            tracker.verified.add(share)
                        set_real_work2()
                        
                        try:
                            if pow_hash <= header['bits'].target or p2pool.DEBUG:
                                for peer in p2p_node.peers.itervalues():
                                    peer.sendShares([share])
                                shared_share_hashes.add(share.hash)
                        except:
                            log.err(None, 'Error forwarding block solution:')
                    
                    if pow_hash > target:
                        print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
                        print '    Hash:   %56x' % (pow_hash,)
                        print '    Target: %56x' % (target,)
                    elif header_hash in received_header_hashes:
                        print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
                    else:
                        received_header_hashes.add(header_hash)
                        
                        pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, request.getUser() if request.getPassword() == vip_pass else None)
                        self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                        while len(self.recent_shares_ts_work) > 50:
                            self.recent_shares_ts_work.pop(0)
                        local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
                    
                    return on_time
                
                return ba, got_response
        
        get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, current_work.value['best_share_hash'], current_work.value['bits'].target, current_work2.value['subsidy'], net)
        
        web_root = web.get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, args.worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received)
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
        
        deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
        
        with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
            pass
        
        print '    ...success!'
        print
        
        
        @defer.inlineCallbacks
        def work_poller():
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
        
        
        # done!
        print 'Started successfully!'
        print
        
        
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100),)
                channel = net.ANNOUNCE_CHANNEL
                def lineReceived(self, line):
                    print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)
                    @defer.inlineCallbacks
                    def new_share(share):
                        if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
                            yield deferral.sleep(random.expovariate(1/60))
                            message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                            if message not in self.recent_messages:
                                self.say(self.channel, message)
                                self._remember_message(message)
                    self.watch_id = tracker.verified.added.watch(new_share)
                    self.recent_messages = []
                def _remember_message(self, message):
                    self.recent_messages.append(message)
                    while len(self.recent_messages) > 100:
                        self.recent_messages.pop(0)
                def privmsg(self, user, channel, message):
                    if channel == self.channel:
                        self._remember_message(message)
                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    if time.time() > current_work2.value['last_update'] + 60:
                        print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead! <---''' % (math.format_dt(time.time() - current_work2.value['last_update']),)
                    
                    height = tracker.get_height(current_work.value['best_share_hash'])
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(tracker.verified.shares),
                        len(tracker.shares),
                        len(p2p_node.peers),
                        sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    
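                    # expected time to share = average attempts per share / local rate,
                    # i.e. (2**256 / share_max_target) / my_att_s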
                    datums, dt = local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work']/dt for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                        math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].max_target / my_att_s) if my_att_s and current_work.value['best_share_hash'] else '???',
                    )
                    
                    if height > 2:
                        (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720)) / (1 - stale_prop)
                        
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            math.format_dt(2**256 / current_work.value['bits'].target / real_att_s),
                        )
                        
                        desired_version_counts = p2pool_data.get_desired_version_counts(tracker, current_work.value['best_share_hash'], min(720, height))
                        majority_desired_version = max(desired_version_counts, key=lambda k: desired_version_counts[k])
                        if majority_desired_version not in [0, 1]:
                            print >>sys.stderr, '#'*40
                            print >>sys.stderr, '>>> WARNING: A MAJORITY OF SHARES CONTAIN A VOTE FOR AN UNSUPPORTED SHARE IMPLEMENTATION! (v%i with %i%% support)' % (
                                majority_desired_version, 100*desired_version_counts[majority_desired_version]/sum(desired_version_counts.itervalues()))
                            print >>sys.stderr, '>>> An upgrade is likely necessary. Check http://p2pool.forre.st/ for more information.'
                            print >>sys.stderr, '#'*40
                    
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        reactor.stop()
        log.err(None, 'Fatal error:')

def run():
    class FixedArgumentParser(argparse.ArgumentParser):
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            return [arg for arg in arg_line.split() if arg.strip()]
    
    
    realnets=dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
    
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from bitcoind>)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--datadir',
        help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
        type=str, action='store', default=None, dest='datadir')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged',
        help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work to author of p2pool (default: 0.5)',
        type=float, action='store', default=0.5, dest='donation_percentage')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    p2pool_group.add_argument('--max-conns', metavar='CONNS',
        help='maximum incoming connections (default: 40)',
        type=int, action='store', default=40, dest='p2pool_conns')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
        help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str, action='store', default=None, dest='worker_endpoint')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (if only one is provided, the username defaults to empty; if neither is provided, P2Pool reads both from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    
    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments are allowed')
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
    
    if args.bitcoind_rpc_password is None:
        if not hasattr(net.PARENT, 'CONF_FILE_FUNC'):
            parser.error('This network has no configuration file function. Manually enter your RPC password.')
        conf_path = net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x''' % (conf_path, random.randrange(2**128)))
        with open(conf_path, 'rb') as f:
            cp = ConfigParser.RawConfigParser()
            cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
            for conf_name, var_name, var_type in [
                ('rpcuser', 'bitcoind_rpc_username', str),
                ('rpcpassword', 'bitcoind_rpc_password', str),
                ('rpcport', 'bitcoind_rpc_port', int),
                ('port', 'bitcoind_p2p_port', int),
            ]:
                if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                    setattr(args, var_name, var_type(cp.get('x', conf_name)))
        if args.bitcoind_rpc_password is None:
            parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
    
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    def separate_url(url):
        s = urlparse.urlsplit(url)
        if '@' not in s.netloc:
            parser.error('merged url netloc must contain an "@"')
        userpass, new_netloc = s.netloc.rsplit('@', 1)
        return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
    merged_urls = map(separate_url, args.merged_urls)
    
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
    
    reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
    reactor.run()