separated p2pool and bitcoin network definitions
diff --git a/p2pool/main.py b/p2pool/main.py
index b2fdbd4..491926e 100644
--- a/p2pool/main.py
+++ b/p2pool/main.py
@@ -4,11 +4,10 @@
 from __future__ import division
 
 import argparse
+import codecs
 import datetime
-import itertools
 import os
 import random
-import sqlite3
 import struct
 import sys
 import time
@@ -23,7 +22,7 @@ from nattraverso import portmapper, ipdiscover
 
 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
 from bitcoin import worker_interface
-from util import db, expiring_dict, jsonrpc, variable, deferral, math
+from util import expiring_dict, jsonrpc, variable, deferral, math
 from . import p2p, skiplists, networks
 import p2pool, p2pool.data as p2pool_data
 
@@ -37,22 +36,22 @@ def getwork(bitcoind):
         transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
         subsidy=work['coinbasevalue'],
         time=work['time'],
-        target=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
+        bits=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
     ))
 
 @deferral.retry('Error creating payout script:', 10)
 @defer.inlineCallbacks
-def get_payout_script2(bitcoind, net):
+def get_payout_script2(bitcoind, net2):
     address = yield bitcoind.rpc_getaccountaddress('p2pool')
     validate_response = yield bitcoind.rpc_validateaddress(address)
     if 'pubkey' not in validate_response:
         print '    Pubkey request failed. Falling back to payout to address.'
-        defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
+        defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net2)))
     pubkey = validate_response['pubkey'].decode('hex')
     defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))
 
 @defer.inlineCallbacks
-def main(args, net):
+def main(args, net, datadir_path):
     try:
         print 'p2pool (version %s)' % (p2pool.__version__,)
         print
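Note on the target/bits rename above: `work['bits']` is now kept wrapped in bitcoin_data.FloatingInteger, so downstream code asks for `['bits'].target` instead of passing a raw 256-bit target around. For reference, a minimal sketch of the standard Bitcoin compact-target ("nBits") decoding that such a wrapper exposes; this is not p2pool's actual FloatingInteger class, just the well-known encoding it represents:

    # Sketch only: expand a compact 'bits' value into the full 256-bit target.
    def bits_to_target(bits):
        exponent = bits >> 24          # high byte: size of the target in bytes
        mantissa = bits & 0x007fffff   # low 23 bits (sign bit ignored in this sketch)
        return mantissa * 2**(8*(exponent - 3))

    # Example: the minimum-difficulty value 0x1d00ffff expands to the familiar
    # 0x00000000ffff0000...0000 target.
    assert bits_to_target(0x1d00ffff) == 0xffff << 208
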
@@ -67,7 +66,7 @@ def main(args, net):
         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
-        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.BITCOIN_RPC_CHECK)(bitcoind)
+        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.PARENT.RPC_CHECK)(bitcoind)
         if not good:
             print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
             return
@@ -78,7 +77,7 @@ def main(args, net):
         
         # connect to bitcoind over bitcoin-p2p
         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
-        factory = bitcoin_p2p.ClientFactory(net)
+        factory = bitcoin_p2p.ClientFactory(net.PARENT)
         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
         yield factory.getProtocol() # waits until handshake is successful
         print '    ...success!'
@@ -86,22 +85,19 @@ def main(args, net):
         
         if args.pubkey_hash is None:
             print 'Getting payout address from bitcoind...'
-            my_script = yield get_payout_script2(bitcoind, net)
+            my_script = yield get_payout_script2(bitcoind, net.PARENT)
         else:
             print 'Computing payout script from provided address....'
             my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
         print '    ...success!'
-        print '    Payout script:', bitcoin_data.script2_to_human(my_script, net)
+        print '    Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
         print
         
-        print 'Loading cached block headers...'
-        ht = bitcoin_p2p.HeightTracker(factory, net.NAME + '_headers.dat')
-        print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
-        print
+        ht = bitcoin_p2p.HeightTracker(bitcoind, factory)
         
         tracker = p2pool_data.OkayTracker(net)
         shared_share_hashes = set()
-        ss = p2pool_data.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_shares.'), net)
+        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
         known_verified = set()
         print "Loading shares..."
         for i, (mode, contents) in enumerate(ss.get_shares()):
@@ -125,8 +121,6 @@ def main(args, net):
             tracker.verified.add(tracker.shares[h])
         print "    ...done loading %i shares!" % (len(tracker.shares),)
         print
-        tracker.added.watch(lambda share: ss.add_share(share))
-        tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
         tracker.removed.watch(lambda share: ss.forget_share(share.hash))
         tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
         tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
@@ -158,7 +152,7 @@ def main(args, net):
             pre_current_work.set(dict(
                 version=work['version'],
                 previous_block=work['previous_block_hash'],
-                target=work['target'],
+                bits=work['bits'],
             ))
         
         def set_real_work2():
@@ -198,14 +192,15 @@ def main(args, net):
                 )
                 requested[share_hash] = t, count + 1
         pre_current_work.changed.watch(lambda _: set_real_work2())
-        pre_merged_work.changed.watch(lambda _: set_real_work2())
-        ht.updated.watch(set_real_work2)
         
         print 'Initializing work...'
         yield set_real_work1()
         print '    ...success!'
         print
         
+        pre_merged_work.changed.watch(lambda _: set_real_work2())
+        ht.updated.watch(set_real_work2)
+        
         @defer.inlineCallbacks
         def set_merged_work():
             if not args.merged_url:
@@ -252,9 +247,9 @@ def main(args, net):
         
         @tracker.verified.added.watch
         def _(share):
-            if share.pow_hash <= share.header['target']:
+            if share.pow_hash <= share.header['bits'].target:
                 if factory.conn.value is not None:
-                    factory.conn.value.send_block(block=share.as_block(tracker, net))
+                    factory.conn.value.send_block(block=share.as_block(tracker))
                 else:
                     print 'No bitcoind connection! Erp!'
                 print
@@ -284,14 +279,14 @@ def main(args, net):
             stops = set(stops)
             shares = []
             for share_hash in share_hashes:
-                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
+                for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                     if share.hash in stops:
                         break
                     shares.append(share)
             print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
             peer.sendShares(shares)
         
-        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
+        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
         
         def parse(x):
             if ':' in x:
@@ -309,20 +304,28 @@ def main(args, net):
         for host in [
             'p2pool.forre.st',
             'dabuttonfactory.com',
+            ] + (['liteco.in'] if net.NAME == 'litecoin' else []) + [
         ]:
             try:
                 nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
             except:
                 log.err(None, 'Error resolving bootstrap node IP:')
         
-        if net.NAME == 'litecoin':
-            nodes.add(((yield reactor.resolve('liteco.in')), net.P2P_PORT))
+        addrs = {}
+        try:
+            addrs = dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt')))
+        except:
+            print "error reading addrs"
+        
+        def save_addrs():
+            open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in addrs.iteritems())
+        task.LoopingCall(save_addrs).start(60)
         
         p2p_node = p2p.Node(
             current_work=current_work,
             port=args.p2pool_port,
             net=net,
-            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), net.NAME),
+            addr_store=addrs,
             preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
         )
         p2p_node.handle_shares = p2p_shares
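The SQLite-backed peer address store (db.SQLiteDict over addrs.dat) is replaced here by a plain dict that is reloaded from data/<NET>/addrs.txt at startup and rewritten by a LoopingCall every 60 seconds, one repr()'d (key, value) tuple per line; the same write-every-60-seconds pattern is reused for share persistence further down. A minimal sketch of that round trip, using an illustrative (host, port) -> (services, first_seen, last_seen) record rather than p2pool's exact address format:

    import os

    def save_addrs(path, addrs):
        # One repr()'d (key, value) tuple per line, as in save_addrs() above.
        with open(path, 'w') as f:
            f.writelines(repr(item) + '\n' for item in addrs.items())

    def load_addrs(path):
        if not os.path.exists(path):
            return {}
        with open(path) as f:
            # eval() mirrors the code above; the file is local and trusted.
            return dict(eval(line) for line in f)

    addrs = {('198.51.100.7', 9333): (1, 1318900000.0, 1318903600.0)}
    save_addrs('addrs.txt', addrs)
    assert load_addrs('addrs.txt') == addrs
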
@@ -335,7 +338,7 @@ def main(args, net):
         def work_changed(new_work):
             #print 'Work changed:', new_work
             shares = []
-            for share in tracker.get_chain_known(new_work['best_share_hash']):
+            for share in tracker.get_chain(new_work['best_share_hash'], tracker.get_height(new_work['best_share_hash'])):
                 if share.hash in shared_share_hashes:
                     break
                 shared_share_hashes.add(share.hash)
@@ -346,6 +349,13 @@ def main(args, net):
         
         current_work.changed.watch(work_changed)
         
+        def save_shares():
+            for share in tracker.get_chain(current_work.value['best_share_hash'], min(tracker.get_height(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH)):
+                ss.add_share(share)
+                if share.hash in tracker.verified.shares:
+                    ss.add_verified_hash(share.hash)
+        task.LoopingCall(save_shares).start(60)
+        
         print '    ...success!'
         print
         
@@ -394,8 +404,7 @@ def main(args, net):
                 removed_unstales.add(share.hash)
         
         
-        def get_payout_script_from_username(request):
-            user = worker_interface.get_username(request)
+        def get_payout_script_from_username(user):
             if user is None:
                 return None
             try:
@@ -405,8 +414,9 @@ def main(args, net):
         
         def compute(request):
             state = current_work.value
+            user = worker_interface.get_username(request)
             
-            payout_script = get_payout_script_from_username(request)
+            payout_script = get_payout_script_from_username(user)
             if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                 payout_script = my_script
             
@@ -432,23 +442,24 @@ def main(args, net):
                         255 if shares == 0 else math.perfect_round(254*stales/shares)
                     )(*get_share_counts()),
                 ),
-                block_target=state['target'],
+                block_target=state['bits'].target,
                 desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
                 net=net,
             )
             
-            print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
-                bitcoin_data.target_to_difficulty(share_info['target']),
-                (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - subsidy//200)*1e-8, net.BITCOIN_SYMBOL,
-                subsidy*1e-8, net.BITCOIN_SYMBOL,
+            print 'New work for worker %s! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
+                user,
+                bitcoin_data.target_to_difficulty(share_info['bits'].target),
+                (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - subsidy//200)*1e-8, net.PARENT.SYMBOL,
+                subsidy*1e-8, net.PARENT.SYMBOL,
                 len(current_work2.value['transactions']),
             )
             
             transactions = [generate_tx] + list(current_work2.value['transactions'])
-            merkle_root = bitcoin_data.merkle_hash(transactions)
+            merkle_root = bitcoin_data.merkle_hash(map(bitcoin_data.tx_type.hash256, transactions))
             merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time()
             
-            return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, current_work2.value['time'], state['target'], share_info['target']), state['best_share_hash']
+            return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, current_work2.value['time'], state['bits'], share_info['bits'].target), state['best_share_hash']
         
         my_shares = set()
         doa_shares = set()
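bitcoin_data.merkle_hash (and calculate_merkle_branch in a later hunk) now take a list of transaction hashes instead of the transactions themselves, hence the map(bitcoin_data.tx_type.hash256, transactions) at the call sites. For reference, a minimal sketch of the conventional Bitcoin merkle-root computation over raw 32-byte txids; p2pool's own implementation works on integer hashes and differs in detail:

    import hashlib

    def dsha256(data):
        # Bitcoin's double SHA-256.
        return hashlib.sha256(hashlib.sha256(data).digest()).digest()

    def merkle_root(hashes):
        # hashes: non-empty list of 32-byte txids in internal byte order.
        hashes = list(hashes)
        while len(hashes) > 1:
            if len(hashes) % 2:
                hashes.append(hashes[-1])  # duplicate the last hash on odd levels
            hashes = [dsha256(hashes[i] + hashes[i+1])
                      for i in range(0, len(hashes), 2)]
        return hashes[0]

    # A block containing only the coinbase has merkle root == that txid.
    txid = dsha256(b'placeholder coinbase')
    assert merkle_root([txid]) == txid
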
@@ -465,14 +476,14 @@ def main(args, net):
                 
                 hash_ = bitcoin_data.block_header_type.hash256(header)
                 
-                pow_hash = net.BITCOIN_POW_FUNC(header)
+                pow_hash = net.PARENT.POW_FUNC(header)
                 
-                if pow_hash <= header['target'] or p2pool.DEBUG:
+                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                     if factory.conn.value is not None:
                         factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                     else:
                         print 'No bitcoind connection! Erp!'
-                    if pow_hash <= header['target']:
+                    if pow_hash <= header['bits'].target:
                         print
                         print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                         print
@@ -483,7 +494,7 @@ def main(args, net):
                             merkle_tx=dict(
                                 tx=transactions[0],
                                 block_hash=hash_,
-                                merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
+                                merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(map(bitcoin_data.tx_type.hash256, transactions), 0)],
                                 index=0,
                             ),
                             merkle_branch=[],
@@ -500,9 +511,8 @@ def main(args, net):
                     except:
                         log.err(None, 'Error while processing merged mining POW:')
                 
-                target = share_info['target']
-                if pow_hash > target:
-                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
+                if pow_hash > share_info['bits'].target:
+                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, share_info['bits'].target)
                     return False
                 share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
                 my_shares.add(share.hash)
@@ -524,7 +534,7 @@ def main(args, net):
             if current_work.value['best_share_hash'] is not None:
                 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                 att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
-                fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
+                fracs = [share.stale_frac for share in tracker.get_chain(current_work.value['best_share_hash'], min(120, height)) if share.stale_frac is not None]
                 return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
             return json.dumps(None)
         
@@ -597,54 +607,52 @@ def main(args, net):
             signal.signal(signal.SIGALRM, watchdog_handler)
             task.LoopingCall(signal.alarm, 30).start(1)
         
-        last_str = None
-        last_time = 0
-        while True:
-            yield deferral.sleep(3)
-            try:
-                if time.time() > current_work2.value['last_update'] + 60:
-                    print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
-                if current_work.value['best_share_hash'] is not None:
-                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
-                    if height > 2:
-                        att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
-                        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
-                        shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
-                        stale_shares = stale_doa_shares + stale_not_doa_shares
-                        fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
-                        this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
-                            math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
-                            height,
-                            len(tracker.verified.shares),
-                            len(tracker.shares),
-                            weights.get(my_script, 0)/total_weight*100,
-                            math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
-                            shares,
-                            stale_not_doa_shares,
-                            stale_doa_shares,
-                            len(p2p_node.peers),
-                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
-                        if fracs:
-                            med = math.median(fracs)
-                            this_str += '\nPool stales: %i%%' % (int(100*med+.5),)
-                            conf = 0.9
-                            if shares:
-                                this_str += ' Own: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius(math.binomial_conf_interval(stale_shares, shares, conf)))
-                                if med < .99:
-                                    this_str += ' Own efficiency: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius((1 - y)/(1 - med) for y in math.binomial_conf_interval(stale_shares, shares, conf)[::-1]))
-                                this_str += ' (%i%% confidence)' % (int(100*conf+.5),)
-                        if this_str != last_str or time.time() > last_time + 15:
-                            print this_str
-                            last_str = this_str
-                            last_time = time.time()
-            
-            
-            except:
-                log.err()
+        @defer.inlineCallbacks
+        def status_thread():
+            last_str = None
+            last_time = 0
+            while True:
+                yield deferral.sleep(3)
+                try:
+                    if time.time() > current_work2.value['last_update'] + 60:
+                        print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
+                    if current_work.value['best_share_hash'] is not None:
+                        height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
+                        if height > 2:
+                            att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
+                            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
+                            shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
+                            stale_shares = stale_doa_shares + stale_not_doa_shares
+                            fracs = [share.stale_frac for share in tracker.get_chain(current_work.value['best_share_hash'], min(120, height)) if share.stale_frac is not None]
+                            this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
+                                math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
+                                height,
+                                len(tracker.verified.shares),
+                                len(tracker.shares),
+                                weights.get(my_script, 0)/total_weight*100,
+                                math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
+                                shares,
+                                stale_not_doa_shares,
+                                stale_doa_shares,
+                                len(p2p_node.peers),
+                            ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
+                            if fracs:
+                                med = math.median(fracs)
+                                this_str += '\nPool stales: %i%%' % (int(100*med+.5),)
+                                conf = 0.95
+                                if shares:
+                                    this_str += u' Own: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius(math.binomial_conf_interval(stale_shares, shares, conf)))
+                                    if med < .99:
+                                        this_str += u' Own efficiency: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius((1 - y)/(1 - med) for y in math.binomial_conf_interval(stale_shares, shares, conf)[::-1]))
+                            if this_str != last_str or time.time() > last_time + 15:
+                                print this_str
+                                last_str = this_str
+                                last_time = time.time()
+                except:
+                    log.err()
+        status_thread()
     except:
         log.err(None, 'Fatal error:')
-    finally:
-        reactor.stop()
 
 def run():
     class FixedArgumentParser(argparse.ArgumentParser):
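The status loop above is hoisted out of main() into its own @defer.inlineCallbacks coroutine, so main() returns once setup finishes while status_thread() keeps running on the reactor; the old finally: reactor.stop() goes away with it. A minimal sketch of the pattern, using task.deferLater in place of p2pool's deferral.sleep helper:

    from twisted.internet import defer, reactor, task

    def sleep(seconds):
        # Rough stand-in for deferral.sleep: a Deferred that fires after a delay.
        return task.deferLater(reactor, seconds, lambda: None)

    @defer.inlineCallbacks
    def status_thread():
        while True:
            yield sleep(3)
            print 'status line would be assembled and printed here'

    reactor.callWhenRunning(status_thread)
    reactor.callLater(10, reactor.stop)  # sketch only; p2pool runs until killed
    reactor.run()
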
@@ -689,56 +697,56 @@ def run():
         help='''use the network's testnet''',
         action='store_const', const=True, default=False, dest='testnet')
     parser.add_argument('--debug',
-        help='debugging mode',
+        help='enable debugging mode',
         action='store_const', const=True, default=False, dest='debug')
     parser.add_argument('-a', '--address',
-        help='generate to this address (defaults to requesting one from bitcoind)',
+        help='generate payouts to this address (default: <address requested from bitcoind>)',
         type=str, action='store', default=None, dest='address')
     parser.add_argument('--logfile',
-        help='''log to specific file (defaults to <network_name>.log in run_p2pool.py's directory)''',
+        help='''log to this file (default: data/<NET>/log)''',
         type=str, action='store', default=None, dest='logfile')
     parser.add_argument('--merged-url',
-        help='call getauxblock on this url to get work for merged mining',
+        help='call getauxblock on this url to get work for merged mining (example: http://127.0.0.1:10332/)',
         type=str, action='store', default=None, dest='merged_url')
     parser.add_argument('--merged-userpass',
-        help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
+        help='use this user and password when requesting merged mining work (example: ncuser:ncpass)',
         type=str, action='store', default=None, dest='merged_userpass')
     parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
-        help='percentage amount to donate to author of p2pool. Default: 0.5',
+        help='donate this percentage of work to author of p2pool (default: 0.5)',
         type=float, action='store', default=0.5, dest='donation_percentage')
     
     p2pool_group = parser.add_argument_group('p2pool interface')
     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
-        help='use TCP port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
+        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
         type=int, action='store', default=None, dest='p2pool_port')
     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
-        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to default p2pool P2P port), in addition to builtin addresses',
+        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
         type=str, action='append', default=[], dest='p2pool_nodes')
     parser.add_argument('--disable-upnp',
-        help='''don't attempt to forward p2pool P2P port from the WAN to this computer using UPnP''',
+        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
         action='store_false', default=True, dest='upnp')
     
     worker_group = parser.add_argument_group('worker interface')
     worker_group.add_argument('-w', '--worker-port', metavar='PORT',
-        help='listen on PORT for RPC connections from miners asking for work and providing responses (default:%s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
+        help='listen on PORT for RPC connections from miners (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
         type=int, action='store', default=None, dest='worker_port')
     worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
-        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee . default: 0''',
+        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
         type=float, action='store', default=0, dest='worker_fee')
     
     bitcoind_group = parser.add_argument_group('bitcoind interface')
     bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
-        help='connect to a bitcoind at this address (default: 127.0.0.1)',
+        help='connect to this address (default: 127.0.0.1)',
         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
-        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getmemorypool (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_RPC_PORT) for _, n in sorted(networks.realnets.items())),
+        help='''connect to JSON-RPC interface at this port (default: %s)''' % ', '.join('%s:%i' % (n.NAME, n.PARENT.RPC_PORT) for _, n in sorted(networks.realnets.items())),
         type=int, action='store', default=None, dest='bitcoind_rpc_port')
     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
-        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_P2P_PORT) for _, n in sorted(networks.realnets.items())),
+        help='''connect to P2P interface at this port (default: %s)''' % ', '.join('%s:%i' % (n.NAME, n.PARENT.P2P_PORT) for _, n in sorted(networks.realnets.items())),
         type=int, action='store', default=None, dest='bitcoind_p2p_port')
     
     bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
-        help='bitcoind RPC interface username (default: empty)',
+        help='bitcoind RPC interface username (default: <empty>)',
         type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
     bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
         help='bitcoind RPC interface password',
@@ -751,9 +759,26 @@ def run():
     
     net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
     
+    datadir_path = os.path.join(os.path.dirname(sys.argv[0]), 'data', net.NAME)
+    if not os.path.exists(datadir_path):
+        os.makedirs(datadir_path)
+    
     if args.logfile is None:
-        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '.log')
+        args.logfile = os.path.join(datadir_path, 'log')
     
+    class EncodeReplacerPipe(object):
+        def __init__(self, inner_file):
+            self.inner_file = inner_file
+            self.softspace = 0
+        def write(self, data):
+            if isinstance(data, unicode):
+                try:
+                    data = data.encode(self.inner_file.encoding, 'replace')
+                except:
+                    data = data.encode('ascii', 'replace')
+            self.inner_file.write(data)
+        def flush(self):
+            self.inner_file.flush()
     class LogFile(object):
         def __init__(self, filename):
             self.filename = filename
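EncodeReplacerPipe exists because the status line now contains non-ASCII text (the ± in the stale-rate estimate) while the console may only accept bytes in some legacy encoding; characters it cannot represent are replaced instead of raising UnicodeEncodeError and breaking the logging chain. A stand-alone sketch of the same idea (Python 2, like the surrounding code):

    import sys

    class EncodeReplacer(object):
        # File-like wrapper: encode unicode with 'replace' so writes never raise.
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.softspace = 0  # required for `print` to cooperate with the wrapper
        def write(self, data):
            if isinstance(data, unicode):
                encoding = getattr(self.inner_file, 'encoding', None) or 'ascii'
                data = data.encode(encoding, 'replace')
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()

    sys.stderr = EncodeReplacer(sys.stderr)
    sys.stderr.write(u'Own: 12\xb112%\n')  # becomes 'Own: 12?12%' on an ASCII console
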
@@ -776,7 +801,7 @@ def run():
                 f = open(self.filename, 'wb')
                 f.write(data)
             f.close()
-            self.inner_file = open(self.filename, 'a')
+            self.inner_file = codecs.open(self.filename, 'a', 'utf-8')
         def write(self, data):
             self.inner_file.write(data)
         def flush(self):
@@ -804,8 +829,21 @@ def run():
             self.buf = lines[-1]
         def flush(self):
             pass
+    class AbortPipe(object):
+        def __init__(self, inner_file):
+            self.inner_file = inner_file
+            self.softspace = 0
+        def write(self, data):
+            try:
+                self.inner_file.write(data)
+            except:
+                sys.stdout = sys.__stdout__
+                log.DefaultObserver.stderr = sys.stderr = sys.__stderr__
+                raise
+        def flush(self):
+            self.inner_file.flush()
     logfile = LogFile(args.logfile)
-    sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
+    sys.stdout = sys.stderr = log.DefaultObserver.stderr = AbortPipe(TimestampingPipe(TeePipe([EncodeReplacerPipe(sys.stderr), logfile])))
     if hasattr(signal, "SIGUSR1"):
         def sigusr1(signum, frame):
             print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
@@ -815,10 +853,10 @@ def run():
     task.LoopingCall(logfile.reopen).start(5)
     
     if args.bitcoind_rpc_port is None:
-        args.bitcoind_rpc_port = net.BITCOIN_RPC_PORT
+        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
     
     if args.bitcoind_p2p_port is None:
-        args.bitcoind_p2p_port = net.BITCOIN_P2P_PORT
+        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
     
     if args.p2pool_port is None:
         args.p2pool_port = net.P2P_PORT
@@ -828,7 +866,7 @@ def run():
     
     if args.address is not None:
         try:
-            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net)
+            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
         except Exception, e:
             parser.error('error parsing address: ' + repr(e))
     else:
@@ -837,5 +875,5 @@ def run():
     if (args.merged_url is None) ^ (args.merged_userpass is None):
         parser.error('must specify --merged-url and --merged-userpass')
     
-    reactor.callWhenRunning(main, args, net)
+    reactor.callWhenRunning(main, args, net, datadir_path)
     reactor.run()