automatic gleaning of rpcport, p2pport, rpcuser, rpcpassword from bitcoin.conf
p2pool/main.py
#!/usr/bin/python
# coding=utf-8

from __future__ import division

import ConfigParser
import StringIO
import argparse
import codecs
import datetime
import os
import random
import struct
import sys
import time
import json
import signal
import traceback

from twisted.internet import defer, reactor, protocol, task
from twisted.web import server, resource
from twisted.python import log
from nattraverso import portmapper, ipdiscover

import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import worker_interface
from util import expiring_dict, jsonrpc, variable, deferral, math
from . import p2p, networks, graphs
import p2pool, p2pool.data as p2pool_data

@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    work = yield bitcoind.rpc_getmemorypool()
    defer.returnValue(dict(
        version=work['version'],
        previous_block_hash=int(work['previousblockhash'], 16),
        transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
        subsidy=work['coinbasevalue'],
        time=work['time'],
        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
        coinbaseflags=work['coinbaseflags'].decode('hex') if 'coinbaseflags' in work else ''.join(x.decode('hex') for x in work['coinbaseaux'].itervalues()) if 'coinbaseaux' in work else '',
    ))

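# For reference, the getmemorypool reply consumed above looks roughly like the
# following (an illustrative sketch only -- the exact field set varies between
# bitcoind versions, which is why getwork() hedges on 'bits' and 'coinbaseaux'):
#
#     {
#         'version': 1,
#         'previousblockhash': '00000000000...',
#         'transactions': ['01000000...', ...],  # raw transactions, hex-encoded
#         'coinbasevalue': 5000000000,           # subsidy plus fees, in satoshis
#         'time': 1316040000,
#         'bits': '1a0abbcd',                    # compact target, hex-encoded
#     }
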
@defer.inlineCallbacks
def main(args, net, datadir_path):
    try:
        my_share_hashes = set()
        my_doa_share_hashes = set()
        p2pool_data.OkayTrackerDelta.my_share_hashes = my_share_hashes
        p2pool_data.OkayTrackerDelta.my_doa_share_hashes = my_doa_share_hashes
        
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
        if not good:
            print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net.PARENT)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        if args.pubkey_hash is None:
            print 'Getting payout address from bitcoind...'
            my_script = yield deferral.retry('Error getting payout address from bitcoind:', 5)(defer.inlineCallbacks(lambda: defer.returnValue(
                bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash((yield bitcoind.rpc_getaccountaddress('p2pool')), net.PARENT)))
            ))()
        else:
            print 'Computing payout script from provided address...'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
        print
        
        ht = bitcoin_p2p.HeightTracker(bitcoind, factory)
        
        tracker = p2pool_data.OkayTracker(net)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        recent_blocks = []
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_merged_work = variable.Variable(None)
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # second set first because everything hooks on the first
            pre_current_work.set(dict(
                version=work['version'],
                previous_block=work['previous_block_hash'],
                bits=work['bits'],
                coinbaseflags=work['coinbaseflags'],
            ))
        
        def set_real_work2():
            best, desired = tracker.think(ht, pre_current_work.value['previous_block'])
            
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['aux_work'] = pre_merged_work.value
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
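                # exponential backoff: skip this share if it was requested less
                # than 10 * 1.5**count seconds ago (allowing 5 seconds of slop)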
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        pre_current_work.changed.watch(lambda _: set_real_work2())
        
        print 'Initializing work...'
        yield set_real_work1()
        print '    ...success!'
        print
        
        pre_merged_work.changed.watch(lambda _: set_real_work2())
        ht.updated.watch(set_real_work2)
        
        merged_proxy = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,)) if args.merged_url else None
        
        @defer.inlineCallbacks
        def set_merged_work():
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged_proxy.rpc_getauxblock)()
                pre_merged_work.set(dict(
                    hash=int(auxblock['hash'], 16),
                    target=bitcoin_data.IntType(256).unpack(auxblock['target'].decode('hex')),
                    chain_id=auxblock['chainid'],
                ))
                yield deferral.sleep(1)
        if merged_proxy is not None:
            set_merged_work()
        
        @pre_merged_work.changed.watch
        def _(new_merged_work):
            print "Got new merged mining work! Difficulty: %f" % (bitcoin_data.target_to_difficulty(new_merged_work['target']),)
        
        # setup p2p logic and join p2pool network
        
        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares...' % (len(shares),)
                
                new_count = 0
                for share in shares:
                    if share.hash in tracker.shares:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if shares and peer is not None:
                    peer_heads.setdefault(shares[0].hash, set()).add(peer)
                
                if new_count:
                    set_real_work2()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
            
            def handle_share_hashes(self, hashes, peer):
                t = time.time()
                get_hashes = []
                for share_hash in hashes:
                    if share_hash in tracker.shares:
                        continue
                    last_request_time, count = requested.get(share_hash, (None, 0))
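                    # same exponential backoff as in set_real_work2 above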
                    if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                        continue
                    print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                    get_hashes.append(share_hash)
                    requested[share_hash] = t, count + 1
                
                if hashes and peer is not None:
                    peer_heads.setdefault(hashes[0], set()).add(peer)
                if get_hashes:
                    peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
            
            def handle_get_shares(self, hashes, parents, stops, peer):
                parents = min(parents, 1000//len(hashes)) if hashes else 0 # avoid ZeroDivisionError on an empty request
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                peer.sendShares(shares)
        
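        # a verified share whose proof-of-work also satisfies the full block
        # target is a complete Bitcoin block: pass it straight to bitcoind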
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(block=share.as_block(tracker))
                else:
                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash)
                print
                recent_blocks.append({ 'ts': share.timestamp, 'hash': '%x' % (share.header_hash) })
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, net.P2P_PORT
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs.txt')):
            try:
                addrs.update(dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt'))))
            except:
                print >>sys.stderr, "error reading addrs"
        for addr in map(parse, net.BOOTSTRAP_ADDRS):
            if addr not in addrs:
                addrs[addr] = (0, time.time(), time.time())
        
        p2p_node = Node(
            best_share_hash_func=lambda: current_work.value['best_share_hash'],
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=set(map(parse, args.p2pool_nodes)),
        )
        p2p_node.start()
        
        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs.txt'), 'w') as f:
                f.writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
        task.LoopingCall(save_addrs).start(60)
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(new_work['best_share_hash'], tracker.get_height(new_work['best_share_hash'])):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in p2p_node.peers.itervalues():
                peer.sendShares([share for share in shares if share.peer is not peer])
        
        current_work.changed.watch(work_changed)
        
        def save_shares():
            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
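            # periodically (mean interval: two minutes) refresh the UPnP mapping
            # of p2pool's P2P port on the LAN gateway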
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        if os.path.exists(os.path.join(datadir_path, 'vip_pass')):
            with open(os.path.join(datadir_path, 'vip_pass'), 'rb') as f:
                vip_pass = f.read().strip('\r\n')
        else:
            vip_pass = '%016x' % (random.randrange(2**64),)
            with open(os.path.join(datadir_path, 'vip_pass'), 'wb') as f:
                f.write(vip_pass)
        print '    Worker password:', vip_pass, '(only required for generating graphs)'
        
        # setup worker logic
        
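        # stale_info markers used below: 0 = on-time, 253 = orphaned, 254 = dead on arrival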
        removed_unstales_var = variable.Variable((0, 0, 0))
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                assert share.share_data['stale_info'] in [0, 253, 254] # we made these shares in this instance
                removed_unstales_var.set((
                    removed_unstales_var.value[0] + 1,
                    removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                    removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                ))
        
        removed_doa_unstales_var = variable.Variable(0)
        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
        
        def get_stale_counts():
            '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
            my_shares = len(my_share_hashes)
            my_doa_shares = len(my_doa_share_hashes)
            delta = tracker.verified.get_delta(current_work.value['best_share_hash'])
            my_shares_in_chain = delta.my_count + removed_unstales_var.value[0]
            my_doa_shares_in_chain = delta.my_doa_count + removed_doa_unstales_var.value
            orphans_recorded_in_chain = delta.my_orphan_announce_count + removed_unstales_var.value[1]
            doas_recorded_in_chain = delta.my_dead_announce_count + removed_unstales_var.value[2]
            
            my_shares_not_in_chain = my_shares - my_shares_in_chain
            my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
            
            return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
        
        class WorkerBridge(worker_interface.WorkerBridge):
            def __init__(self):
                worker_interface.WorkerBridge.__init__(self)
                self.new_work_event = current_work.changed
                
                self.merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
                self.recent_shares_ts_work = []
            
            def _get_payout_script_from_username(self, user):
                if user is None:
                    return None
                try:
                    pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
                except: # XXX blah
                    return None
                return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
            
            def preprocess_request(self, request):
                payout_script = self._get_payout_script_from_username(request.getUser())
                if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                    payout_script = my_script
                return payout_script,
            
            def get_work(self, payout_script):
                if len(p2p_node.peers) == 0 and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                if current_work.value['best_share_hash'] is None and net.PERSIST:
                    raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
                if time.time() > current_work2.value['last_update'] + 60:
                    raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
                
                share_info, generate_tx = p2pool_data.generate_transaction(
                    tracker=tracker,
                    share_data=dict(
                        previous_share_hash=current_work.value['best_share_hash'],
                        coinbase=(('' if current_work.value['aux_work'] is None else
                            '\xfa\xbemm' + bitcoin_data.IntType(256, 'big').pack(current_work.value['aux_work']['hash']) + struct.pack('<ii', 1, 0)) + current_work.value['coinbaseflags'])[:100],
                        nonce=struct.pack('<Q', random.randrange(2**64)),
                        new_script=payout_script,
                        subsidy=current_work2.value['subsidy'],
                        donation=math.perfect_round(65535*args.donation_percentage/100),
                        stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
                            253 if orphans > orphans_recorded_in_chain else
                            254 if doas > doas_recorded_in_chain else
                            0
                        )(*get_stale_counts()),
                    ),
                    block_target=current_work.value['bits'].target,
                    desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                    net=net,
                )
                
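                # pseudoshare difficulty: start at the getwork minimum (difficulty 1)
                # and, once 50 recent submissions have been recorded, retarget so
                # this worker returns roughly one result every 5 seconds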
                target = 2**256//2**32 - 1
                if len(self.recent_shares_ts_work) == 50:
                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
                    target = min(target, 2**256//(hash_rate * 5))
                target = max(target, share_info['bits'].target)
                if current_work.value['aux_work']:
                    target = max(target, current_work.value['aux_work']['target'])
                
                transactions = [generate_tx] + list(current_work2.value['transactions'])
                merkle_root = bitcoin_data.merkle_hash(map(bitcoin_data.tx_type.hash256, transactions))
                self.merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time(), current_work.value['aux_work'], target
                
                print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
                    bitcoin_data.target_to_difficulty(target),
                    bitcoin_data.target_to_difficulty(share_info['bits'].target),
                    (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - current_work2.value['subsidy']//200)*1e-8, net.PARENT.SYMBOL,
                    current_work2.value['subsidy']*1e-8, net.PARENT.SYMBOL,
                    len(current_work2.value['transactions']),
                )
                
                return bitcoin_getwork.BlockAttempt(
                    version=current_work.value['version'],
                    previous_block=current_work.value['previous_block'],
                    merkle_root=merkle_root,
                    timestamp=current_work2.value['time'],
                    bits=current_work.value['bits'],
                    share_target=target,
                )
            
            def got_response(self, header, request):
                # match up with transactions
                if header['merkle_root'] not in self.merkle_root_to_transactions:
                    print >>sys.stderr, '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                share_info, transactions, getwork_time, aux_work, target = self.merkle_root_to_transactions[header['merkle_root']]
                
                pow_hash = net.PARENT.POW_FUNC(header)
                on_time = current_work.value['best_share_hash'] == share_info['share_data']['previous_share_hash']
                
                try:
                    if pow_hash <= header['bits'].target or p2pool.DEBUG:
                        if factory.conn.value is not None:
                            factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                        else:
                            print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
                        if pow_hash <= header['bits'].target:
                            print
                            print 'GOT BLOCK FROM MINER! Passing to bitcoind! bitcoin: %x' % (bitcoin_data.block_header_type.hash256(header),)
                            print
                            recent_blocks.append({ 'ts': time.time(), 'hash': '%x' % (bitcoin_data.block_header_type.hash256(header),) })
                except:
                    log.err(None, 'Error while processing potential block:')
                
                try:
                    if aux_work is not None and (pow_hash <= aux_work['target'] or p2pool.DEBUG):
                        assert bitcoin_data.IntType(256, 'big').pack(aux_work['hash']).encode('hex') == transactions[0]['tx_ins'][0]['script'][4:4+32].encode('hex')
                        df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(merged_proxy.rpc_getauxblock)(
                            bitcoin_data.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                            bitcoin_data.aux_pow_type.pack(dict(
                                merkle_tx=dict(
                                    tx=transactions[0],
                                    block_hash=bitcoin_data.block_header_type.hash256(header),
                                    merkle_branch=bitcoin_data.calculate_merkle_branch(map(bitcoin_data.tx_type.hash256, transactions), 0),
                                    index=0,
                                ),
                                merkle_branch=[],
                                index=0,
                                parent_block_header=header,
                            )).encode('hex'),
                        )
                        @df.addCallback
                        def _(result):
                            if result != (pow_hash <= aux_work['target']):
                                print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (result,)
                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')
                
                if pow_hash <= share_info['bits'].target:
                    share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
                    print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                        request.getUser(),
                        p2pool_data.format_hash(share.hash),
                        p2pool_data.format_hash(share.previous_hash),
                        time.time() - getwork_time,
                        ' DEAD ON ARRIVAL' if not on_time else '',
                    )
                    my_share_hashes.add(share.hash)
                    if not on_time:
                        my_doa_share_hashes.add(share.hash)
                    p2p_node.handle_shares([share], None)
                    try:
                        if pow_hash <= header['bits'].target:
                            for peer in p2p_node.peers.itervalues():
                                peer.sendShares([share])
                            shared_share_hashes.add(share.hash)
                    except:
                        log.err(None, 'Error forwarding block solution:')
                
                if pow_hash <= target:
                    reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
                    if request.getPassword() == vip_pass:
                        reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
                    self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                    while len(self.recent_shares_ts_work) > 50:
                        self.recent_shares_ts_work.pop(0)
                
                if pow_hash > target:
                    print 'Worker submitted share with hash > target:'
                    print '    Hash:   %56x' % (pow_hash,)
                    print '    Target: %56x' % (target,)
                
                return on_time
        
        web_root = resource.Resource()
        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
        
        def get_rate():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return json.dumps(None)
            return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
            return json.dumps(res)
        
        def get_current_txouts():
            wb = WorkerBridge()
            tmp_tag = str(random.randrange(2**64))
            outputs = wb.merkle_root_to_transactions[wb.get_work(tmp_tag).merkle_root][1][0]['tx_outs']
            total = sum(out['value'] for out in outputs)
            total_without_tag = sum(out['value'] for out in outputs if out['script'] != tmp_tag)
            total_diff = total - total_without_tag
            return dict((out['script'], out['value'] + math.perfect_round(out['value']*total_diff/total)) for out in outputs if out['script'] != tmp_tag and out['value'])
        
        def get_current_scaled_txouts(scale, trunc=0):
            txouts = get_current_txouts()
            total = sum(txouts.itervalues())
            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
            if trunc > 0:
                total_random = 0
                random_set = set()
                for s in sorted(results, key=results.__getitem__):
                    total_random += results[s]
                    random_set.add(s)
                    if total_random >= trunc and results[s] >= trunc:
                        break
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
            if sum(results.itervalues()) < int(scale):
                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
            return results
        
        def get_current_payouts():
            return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
        
        def get_patron_sendmany(this):
            try:
                if '/' in this:
                    this, trunc = this.split('/', 1)
                else:
                    trunc = '0.01'
                return json.dumps(dict(
                    (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
                    for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
                    if bitcoin_data.script2_to_address(script, net.PARENT) is not None
                ))
            except:
                return json.dumps(None)
        
        def get_global_stats():
            # averaged over last hour
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return json.dumps(None)
            
            nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            return json.dumps(dict(
                pool_nonstale_hash_rate=nonstale_hash_rate,
                pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
                pool_stale_prop=stale_prop,
            ))
        
        def get_local_stats():
            lookbehind = 3600//net.SHARE_PERIOD
            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
                return json.dumps(None)
            
            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
            
            my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
            my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
            my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
            my_share_count = my_unstale_count + my_orphan_count + my_doa_count
            my_stale_count = my_orphan_count + my_doa_count
            
            my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
            
            my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
                for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
                if share.hash in my_share_hashes)
            actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
                tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
            share_att_s = my_work / actual_time
            
            return json.dumps(dict(
                my_hash_rates_in_last_hour=dict(
                    nonstale=share_att_s,
                    rewarded=share_att_s/(1 - global_stale_prop),
                    actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
                ),
                my_share_counts_in_last_hour=dict(
                    shares=my_share_count,
                    unstale_shares=my_unstale_count,
                    stale_shares=my_stale_count,
                    orphan_stale_shares=my_orphan_count,
                    doa_stale_shares=my_doa_count,
                ),
                my_stale_proportions_in_last_hour=dict(
                    stale=my_stale_prop,
                    orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                    dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
                ),
            ))
        
        def get_peer_addresses():
            return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type, *fields):
                resource.Resource.__init__(self)
                self.func, self.mime_type, self.fields = func, mime_type, fields
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                request.setHeader('Access-Control-Allow-Origin', '*')
                return self.func(*(request.args[field][0] for field in self.fields))
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
        web_root.putChild('graphs', grapher.get_resource())
        def add_point():
            if tracker.get_height(current_work.value['best_share_hash']) < 720:
                return
            grapher.add_poolrate_point(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
        task.LoopingCall(add_point).start(100)
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        
        @defer.inlineCallbacks
        def work_poller():
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
        work_poller()
        
        
        # done!
        print 'Started successfully!'
        print
        
        
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool'
                def lineReceived(self, line):
                    print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join('#p2pool')
                    self.watch_id = current_work.changed.watch(self._work_changed)
                    self.announced_hashes = set()
                def _work_changed(self, new_work):
                    share = tracker.shares[new_work['best_share_hash']]
                    if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes:
                        self.announced_hashes.add(share.header_hash) # so each block is only announced once
                        self.privmsg('#p2pool', '\x033,4BLOCK FOUND! http://blockexplorer.com/block/' + bitcoin_data.IntType(256, 'big').pack(share.header_hash).encode('hex'))
                def connectionLost(self, reason):
                    current_work.changed.unwatch(self.watch_id)
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    if time.time() > current_work2.value['last_update'] + 60:
                        print >>sys.stderr, '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                    if current_work.value['best_share_hash'] is not None:
                        height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                        if height > 2:
                            att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                            (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
                            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], min(720, height))
                            real_att_s = att_s / (1 - stale_prop)
                            my_att_s = real_att_s*weights.get(my_script, 0)/total_weight
                            this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i (%i incoming)' % (
                                math.format(int(real_att_s)),
                                height,
                                len(tracker.verified.shares),
                                len(tracker.shares),
                                weights.get(my_script, 0)/total_weight*100,
                                math.format(int(my_att_s)),
                                shares,
                                stale_orphan_shares,
                                stale_doa_shares,
                                len(p2p_node.peers),
                                sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                            ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                            this_str += '\nAverage time between blocks: %.2f days' % (
                                2**256 / current_work.value['bits'].target / real_att_s / (60 * 60 * 24),
                            )
                            this_str += '\nPool stales: %i%%' % (int(100*stale_prop+.5),)
                            stale_center, stale_radius = math.binomial_conf_center_radius(stale_orphan_shares + stale_doa_shares, shares, 0.95)
                            this_str += u' Own: %i±%i%%' % (int(100*stale_center+.5), int(100*stale_radius+.5))
                            this_str += u' Own efficiency: %i±%i%%' % (int(100*(1 - stale_center)/(1 - stale_prop)+.5), int(100*stale_radius/(1 - stale_prop)+.5))
                            if this_str != last_str or time.time() > last_time + 15:
                                print this_str
                                last_str = this_str
                                last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        log.err(None, 'Fatal error:')

def run():
    class FixedArgumentParser(argparse.ArgumentParser):
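        # argparse's stock fromfile support reads one argument per line and does
        # not expand nested @file references; this subclass splits each line on
        # whitespace and expands referenced files recursively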
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            
            # return the modified argument list
            return new_arg_strings
        
        def convert_arg_line_to_args(self, arg_line):
            return [arg for arg in arg_line.split() if arg.strip()]
    
    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from bitcoind>)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining (example: http://127.0.0.1:10332/)',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='use this user and password when requesting merged mining work (example: ncuser:ncpass)',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work to author of p2pool (default: 0.5)',
        type=float, action='store', default=0.5, dest='donation_percentage')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (n.NAME, n.PARENT.RPC_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (n.NAME, n.PARENT.P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
    
    net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
    
    datadir_path = os.path.join(os.path.dirname(sys.argv[0]), 'data', net.NAME)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
    class EncodeReplacerPipe(object):
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.softspace = 0
        def write(self, data):
            if isinstance(data, unicode):
                try:
                    data = data.encode(self.inner_file.encoding, 'replace')
                except:
                    data = data.encode('ascii', 'replace')
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
    class LogFile(object):
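        # log file that can be reopened in place (used by the SIGUSR1 handler and
        # the periodic reopen below); on reopen, a file that has grown past 100 MB
        # is truncated to roughly its last megabyte, starting at a line boundary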
        def __init__(self, filename):
            self.filename = filename
            self.inner_file = None
            self.reopen()
        def reopen(self):
            if self.inner_file is not None:
                self.inner_file.close()
            open(self.filename, 'a').close()
            f = open(self.filename, 'rb')
            f.seek(0, os.SEEK_END)
            length = f.tell()
            if length > 100*1000*1000:
                f.seek(-1000*1000, os.SEEK_END)
                while True:
                    if f.read(1) in ('', '\n'):
                        break
                data = f.read()
                f.close()
                f = open(self.filename, 'wb')
                f.write(data)
            f.close()
            self.inner_file = codecs.open(self.filename, 'a', 'utf-8')
        def write(self, data):
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
    class TeePipe(object):
        def __init__(self, outputs):
            self.outputs = outputs
        def write(self, data):
            for output in self.outputs:
                output.write(data)
        def flush(self):
            for output in self.outputs:
                output.flush()
    class TimestampingPipe(object):
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.buf = ''
            self.softspace = 0
        def write(self, data):
            buf = self.buf + data
            lines = buf.split('\n')
            for line in lines[:-1]:
                self.inner_file.write('%s %s\n' % (datetime.datetime.now(), line))
                self.inner_file.flush()
            self.buf = lines[-1]
        def flush(self):
            pass
    class AbortPipe(object):
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.softspace = 0
        def write(self, data):
            try:
                self.inner_file.write(data)
            except:
                sys.stdout = sys.__stdout__
                log.DefaultObserver.stderr = sys.stderr = sys.__stderr__
                raise
        def flush(self):
            self.inner_file.flush()
    class PrefixPipe(object):
        def __init__(self, inner_file, prefix):
            self.inner_file = inner_file
            self.prefix = prefix
            self.buf = ''
            self.softspace = 0
        def write(self, data):
            buf = self.buf + data
            lines = buf.split('\n')
            for line in lines[:-1]:
                self.inner_file.write(self.prefix + line + '\n')
                self.inner_file.flush()
            self.buf = lines[-1]
        def flush(self):
            pass
    logfile = LogFile(args.logfile)
    pipe = TimestampingPipe(TeePipe([EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = AbortPipe(PrefixPipe(pipe, '> '))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
    
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments is allowed')
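    # zero values -> (None, None); one -> (None, password); two -> (username, password)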
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
    
    if args.bitcoind_rpc_password is None:
        if not hasattr(net, 'CONF_FILE_FUNC'):
            parser.error('This network has no configuration file function. Manually enter your RPC password.')
        conf_path = net.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you haven't created a configuration file yet, you should create\r\n'''
                '''one at %s with the text:\r\n'''
                '''|    server=1\r\n'''
                '''|    rpcpassword=<A LONG RANDOM PASSWORD THAT YOU DON'T HAVE TO REMEMBER>''' % (conf_path,))
        with open(conf_path, 'rb') as f:
            cp = ConfigParser.RawConfigParser()
            cp.readfp(StringIO.StringIO('[x]\r\n' + f.read()))
            for conf_name, var_name, var_type in [
                ('rpcuser', 'bitcoind_rpc_username', str),
                ('rpcpassword', 'bitcoind_rpc_password', str),
                ('rpcport', 'bitcoind_rpc_port', int),
                ('port', 'bitcoind_p2p_port', int),
            ]:
                if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                    setattr(args, var_name, var_type(cp.get('x', conf_name)))
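        # bitcoin.conf has no INI section headers, so a dummy '[x]' header is
        # prepended above to make it parseable by RawConfigParser. A typical
        # bitcoin.conf (illustrative values only):
        #
        #     server=1
        #     rpcuser=someuser
        #     rpcpassword=somelongrandompassword
        #     rpcport=8332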
    
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.worker_port is None:
        args.worker_port = net.WORKER_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    if (args.merged_url is None) ^ (args.merged_userpass is None):
        parser.error('must specify both --merged-url and --merged-userpass or neither')
    
    reactor.callWhenRunning(main, args, net, datadir_path)
    reactor.run()