moved heighttracker to bitcoin.height_tracker
[p2pool.git] / p2pool / main.py
index 8702afd..379522f 100644 (file)
@@ -5,23 +5,21 @@ import StringIO
 import argparse
 import os
 import random
-import struct
 import sys
 import time
-import json
 import signal
 import traceback
 import urlparse
 
-from twisted.internet import defer, error, reactor, protocol, task
-from twisted.web import server, resource
+from twisted.internet import defer, reactor, protocol, task
+from twisted.web import server
 from twisted.python import log
 from nattraverso import portmapper, ipdiscover
 
 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
-from bitcoin import worker_interface
+from bitcoin import worker_interface, height_tracker
 from util import expiring_dict, jsonrpc, variable, deferral, math, logging, pack
-from . import p2p, networks, graphs
+from . import p2p, networks, web
 import p2pool, p2pool.data as p2pool_data
 
 @deferral.retry('Error getting work from bitcoind:', 3)
@@ -51,12 +49,6 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
     try:
         print 'p2pool (version %s)' % (p2pool.__version__,)
         print
-        try:
-            from . import draw
-        except ImportError:
-            draw = None
-            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
-            print
         
         # connect to bitcoind over JSON-RPC and do initial getmemorypool
         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
@@ -79,7 +71,7 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         print '    ...success!'
         print
         
-        print 'Determining payout script...'
+        print 'Determining payout address...'
         if args.pubkey_hash is None:
             address_path = os.path.join(datadir_path, 'cached_payout_address')
             
@@ -103,11 +95,10 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
             with open(address_path, 'wb') as f:
                 f.write(address)
             
-            my_script = bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net.PARENT))
+            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
         else:
-            print '    ...Computing payout script from provided address...'
-            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
-        print '    ...success! Payout script:', bitcoin_data.script2_to_human(my_script, net.PARENT)
+            my_pubkey_hash = args.pubkey_hash
+        print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
         print
         
         my_share_hashes = set()
@@ -175,16 +166,7 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
             ))
         yield set_real_work1()
         
-        if '\ngetblock ' in (yield deferral.retry()(bitcoind.rpc_help)()):
-            height_cacher = deferral.DeferredCacher(defer.inlineCallbacks(lambda block_hash: defer.returnValue((yield bitcoind.rpc_getblock('%x' % (block_hash,)))['blockcount'])))
-            best_height_cached = variable.Variable((yield deferral.retry()(height_cacher)(pre_current_work.value['previous_block'])))
-            def get_height_rel_highest(block_hash):
-                this_height = height_cacher.call_now(block_hash, 0)
-                best_height = height_cacher.call_now(pre_current_work.value['previous_block'], 0)
-                best_height_cached.set(max(best_height_cached.value, this_height, best_height))
-                return this_height - best_height_cached.value
-        else:
-            get_height_rel_highest = bitcoin_p2p.HeightTracker(bitcoind, factory).get_height_rel_highest
+        get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, pre_current_work, net)
         
         def set_real_work2():
             best, desired = tracker.think(get_height_rel_highest, pre_current_work.value['previous_block'], pre_current_work.value['bits'])
@@ -304,13 +286,18 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                 print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                 peer.sendShares(shares)
         
+        @deferral.retry('Error submitting block: (will retry)', 10, 10)
+        @defer.inlineCallbacks
+        def submit_block(block, ignore_failure):
+            success = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex'))
+            success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target
+            if (not success and success_expected and not ignore_failure) or (success and not success_expected):
+                print >>sys.stderr, 'Block submittal result: %s Expected: %s' % (success, success_expected)
+        
         @tracker.verified.added.watch
         def _(share):
             if share.pow_hash <= share.header['bits'].target:
-                if factory.conn.value is not None:
-                    factory.conn.value.send_block(block=share.as_block(tracker))
-                else:
-                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! Erp!'
+                submit_block(share.as_block(tracker), ignore_failure=True)
                 print
                 print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                 print
@@ -356,9 +343,7 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         )
         p2p_node.start()
         
-        def save_addrs():
-            open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())
-        task.LoopingCall(save_addrs).start(60)
+        task.LoopingCall(lambda: open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in p2p_node.addr_store.iteritems())).start(60)
         
         # send share when the chain changes to their chain
         def work_changed(new_work):
@@ -385,24 +370,21 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         print '    ...success!'
         print
         
-        start_time = time.time()
-        
-        @defer.inlineCallbacks
-        def upnp_thread():
-            while True:
-                try:
-                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
-                    if is_lan:
-                        pm = yield portmapper.get_port_mapper()
-                        yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
-                except defer.TimeoutError:
-                    pass
-                except:
-                    if p2pool.DEBUG:
-                        log.err(None, "UPnP error:")
-                yield deferral.sleep(random.expovariate(1/120))
-        
         if args.upnp:
+            @defer.inlineCallbacks
+            def upnp_thread():
+                while True:
+                    try:
+                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
+                        if is_lan:
+                            pm = yield portmapper.get_port_mapper()
+                            yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
+                    except defer.TimeoutError:
+                        pass
+                    except:
+                        if p2pool.DEBUG:
+                            log.err(None, 'UPnP error:')
+                    yield deferral.sleep(random.expovariate(1/120))
             upnp_thread()
         
         # start listening for workers with a JSON-RPC server
@@ -421,6 +403,7 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         # setup worker logic
         
         removed_unstales_var = variable.Variable((0, 0, 0))
+        removed_doa_unstales_var = variable.Variable(0)
         @tracker.verified.removed.watch
         def _(share):
             if share.hash in my_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
@@ -430,10 +413,6 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                     removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 253 else 0),
                     removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 254 else 0),
                 ))
-        
-        removed_doa_unstales_var = variable.Variable(0)
-        @tracker.verified.removed.watch
-        def _(share):
             if share.hash in my_doa_share_hashes and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                 removed_doa_unstales_var.set(removed_doa_unstales_var.value + 1)
         
@@ -453,6 +432,7 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
             return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
         
         
+        pseudoshare_received = variable.Event()
         local_rate_monitor = math.RateMonitor(10*60)
         
         class WorkerBridge(worker_interface.WorkerBridge):
@@ -461,22 +441,36 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                 self.new_work_event = current_work.changed
                 self.recent_shares_ts_work = []
             
-            def _get_payout_script_from_username(self, user):
-                if user is None:
-                    return None
-                try:
-                    pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
-                except: # XXX blah
-                    return None
-                return bitcoin_data.pubkey_hash_to_script2(pubkey_hash)
-            
             def preprocess_request(self, request):
-                payout_script = self._get_payout_script_from_username(request.getUser())
-                if payout_script is None or random.uniform(0, 100) < args.worker_fee:
-                    payout_script = my_script
-                return payout_script,
+                user = request.getUser() if request.getUser() is not None else ''
+                
+                desired_pseudoshare_target = None
+                if '+' in user:
+                    user, desired_pseudoshare_difficulty_str = user.rsplit('+', 1)
+                    try:
+                        desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(desired_pseudoshare_difficulty_str))
+                    except:
+                        pass
+                
+                desired_share_target = 2**256 - 1
+                if '/' in user:
+                    user, min_diff_str = user.rsplit('/', 1)
+                    try:
+                        desired_share_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
+                    except:
+                        pass
+                
+                if random.uniform(0, 100) < args.worker_fee:
+                    pubkey_hash = my_pubkey_hash
+                else:
+                    try:
+                        pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, net.PARENT)
+                    except: # XXX blah
+                        pubkey_hash = my_pubkey_hash
+                
+                return pubkey_hash, desired_share_target, desired_pseudoshare_target
             
-            def get_work(self, payout_script):
+            def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
                 if len(p2p_node.peers) == 0 and net.PERSIST:
                     raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
                 if current_work.value['best_share_hash'] is None and net.PERSIST:
@@ -502,8 +496,8 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                     share_data=dict(
                         previous_share_hash=current_work.value['best_share_hash'],
                         coinbase=(mm_data + current_work.value['coinbaseflags'])[:100],
-                        nonce=struct.pack('<Q', random.randrange(2**64)),
-                        new_script=payout_script,
+                        nonce=random.randrange(2**32),
+                        pubkey_hash=pubkey_hash,
                         subsidy=current_work2.value['subsidy'],
                         donation=math.perfect_round(65535*args.donation_percentage/100),
                         stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
@@ -514,19 +508,24 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                     ),
                     block_target=current_work.value['bits'].target,
                     desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
+                    desired_target=desired_share_target,
                     net=net,
                 )
                 
-                target = 2**256//2**32 - 1
-                if len(self.recent_shares_ts_work) == 50:
-                    hash_rate = sum(work for ts, work in self.recent_shares_ts_work)//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
-                    target = min(target, 2**256//(hash_rate * 5))
+                target = net.PARENT.SANE_MAX_TARGET
+                if desired_pseudoshare_target is None:
+                    if len(self.recent_shares_ts_work) == 50:
+                        hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
+                        target = min(target, 2**256//hash_rate)
+                else:
+                    target = min(target, desired_pseudoshare_target)
                 target = max(target, share_info['bits'].target)
                 for aux_work in current_work.value['mm_chains'].itervalues():
                     target = max(target, aux_work['target'])
                 
                 transactions = [generate_tx] + list(current_work2.value['transactions'])
-                merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(bitcoin_data.tx_type.pack(generate_tx)), 0, current_work2.value['merkle_branch'])
+                packed_generate_tx = bitcoin_data.tx_type.pack(generate_tx)
+                merkle_root = bitcoin_data.check_merkle_branch(bitcoin_data.hash256(packed_generate_tx), 0, current_work2.value['merkle_branch'])
                 
                 getwork_time = time.time()
                 merkle_branch = current_work2.value['merkle_branch']
@@ -558,13 +557,7 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                     
                     try:
                         if pow_hash <= header['bits'].target or p2pool.DEBUG:
-                            @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
-                            def submit_block():
-                                if factory.conn.value is None:
-                                    print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%32x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
-                                    raise deferral.RetrySilentlyException()
-                                factory.conn.value.send_block(block=dict(header=header, txs=transactions))
-                            submit_block()
+                            submit_block(dict(header=header, txs=transactions), ignore_failure=False)
                             if pow_hash <= header['bits'].target:
                                 print
                                 print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
@@ -603,7 +596,10 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                             log.err(None, 'Error while processing merged mining POW:')
                     
                     if pow_hash <= share_info['bits'].target:
-                        share = p2pool_data.Share(net, header, share_info, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
+                        min_header = dict(header);del min_header['merkle_root']
+                        hash_link = p2pool_data.prefix_to_hash_link(packed_generate_tx[:-32-4], p2pool_data.gentx_before_refhash)
+                        share = p2pool_data.Share(net, None, min_header, share_info, hash_link=hash_link, merkle_branch=merkle_branch, other_txs=transactions[1:] if pow_hash <= header['bits'].target else None)
+                        
                         print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                             request.getUser(),
                             p2pool_data.format_hash(share.hash),
@@ -628,250 +624,34 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                         except:
                             log.err(None, 'Error forwarding block solution:')
                     
-                    if pow_hash <= target and header_hash not in received_header_hashes:
-                        reactor.callLater(1, grapher.add_localrate_point, bitcoin_data.target_to_average_attempts(target), not on_time)
-                        if request.getPassword() == vip_pass:
-                            reactor.callLater(1, grapher.add_localminer_point, request.getUser(), bitcoin_data.target_to_average_attempts(target), not on_time)
-                        self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
-                        while len(self.recent_shares_ts_work) > 50:
-                            self.recent_shares_ts_work.pop(0)
-                        local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
-                    
-                    if header_hash in received_header_hashes:
-                        print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
-                    received_header_hashes.add(header_hash)
-                    
                     if pow_hash > target:
                         print 'Worker %s submitted share with hash > target:' % (request.getUser(),)
                         print '    Hash:   %56x' % (pow_hash,)
                         print '    Target: %56x' % (target,)
+                    elif header_hash in received_header_hashes:
+                        print >>sys.stderr, 'Worker %s @ %s submitted share more than once!' % (request.getUser(), request.getClientIP())
+                    else:
+                        received_header_hashes.add(header_hash)
+                        
+                        pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, request.getUser() if request.getPassword() == vip_pass else None)
+                        self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
+                        while len(self.recent_shares_ts_work) > 50:
+                            self.recent_shares_ts_work.pop(0)
+                        local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=request.getUser()))
                     
                     return on_time
                 
                 return ba, got_response
         
-        web_root = resource.Resource()
-        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
-        
-        def get_rate():
-            if tracker.get_height(current_work.value['best_share_hash']) < 720:
-                return json.dumps(None)
-            return json.dumps(p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
-                / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))
-        
-        def get_users():
-            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
-            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
-            res = {}
-            for script in sorted(weights, key=lambda s: weights[s]):
-                res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight
-            return json.dumps(res)
-        
-        def get_current_txouts():
-            share = tracker.shares[current_work.value['best_share_hash']]
-            share_info, gentx = p2pool_data.generate_transaction(tracker, share.share_info['share_data'], share.header['bits'].target, share.share_info['timestamp'], share.net)
-            return dict((out['script'], out['value']) for out in gentx['tx_outs'])
-        
-        def get_current_scaled_txouts(scale, trunc=0):
-            txouts = get_current_txouts()
-            total = sum(txouts.itervalues())
-            results = dict((script, value*scale//total) for script, value in txouts.iteritems())
-            if trunc > 0:
-                total_random = 0
-                random_set = set()
-                for s in sorted(results, key=results.__getitem__):
-                    if results[s] >= trunc:
-                        break
-                    total_random += results[s]
-                    random_set.add(s)
-                if total_random:
-                    winner = math.weighted_choice((script, results[script]) for script in random_set)
-                    for script in random_set:
-                        del results[script]
-                    results[winner] = total_random
-            if sum(results.itervalues()) < int(scale):
-                results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
-            return results
-        
-        def get_current_payouts():
-            return json.dumps(dict((bitcoin_data.script2_to_human(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))
-        
-        def get_patron_sendmany(this):
-            try:
-                if '/' in this:
-                    this, trunc = this.split('/', 1)
-                else:
-                    trunc = '0.01'
-                return json.dumps(dict(
-                    (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8)
-                    for script, value in get_current_scaled_txouts(scale=int(float(this)*1e8), trunc=int(float(trunc)*1e8)).iteritems()
-                    if bitcoin_data.script2_to_address(script, net.PARENT) is not None
-                ))
-            except:
-                return json.dumps(None)
-        
-        def get_global_stats():
-            # averaged over last hour
-            lookbehind = 3600//net.SHARE_PERIOD
-            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
-                return None
-            
-            nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)
-            stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
-            return json.dumps(dict(
-                pool_nonstale_hash_rate=nonstale_hash_rate,
-                pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
-                pool_stale_prop=stale_prop,
-            ))
-        
-        def get_local_stats():
-            lookbehind = 3600//net.SHARE_PERIOD
-            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
-                return None
-            
-            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
-            
-            my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes)
-            my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253)
-            my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254)
-            my_share_count = my_unstale_count + my_orphan_count + my_doa_count
-            my_stale_count = my_orphan_count + my_doa_count
-            
-            my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
-            
-            my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
-                for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1)
-                if share.hash in my_share_hashes)
-            actual_time = (tracker.shares[current_work.value['best_share_hash']].timestamp -
-                tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp)
-            share_att_s = my_work / actual_time
-            
-            miner_hash_rates = {}
-            miner_dead_hash_rates = {}
-            datums, dt = local_rate_monitor.get_datums_in_last()
-            for datum in datums:
-                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
-                if datum['dead']:
-                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
-            
-            return json.dumps(dict(
-                my_hash_rates_in_last_hour=dict(
-                    note="DEPRECATED",
-                    nonstale=share_att_s,
-                    rewarded=share_att_s/(1 - global_stale_prop),
-                    actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
-                ),
-                my_share_counts_in_last_hour=dict(
-                    shares=my_share_count,
-                    unstale_shares=my_unstale_count,
-                    stale_shares=my_stale_count,
-                    orphan_stale_shares=my_orphan_count,
-                    doa_stale_shares=my_doa_count,
-                ),
-                my_stale_proportions_in_last_hour=dict(
-                    stale=my_stale_prop,
-                    orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
-                    dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
-                ),
-                miner_hash_rates=miner_hash_rates,
-                miner_dead_hash_rates=miner_dead_hash_rates,
-            ))
-        
-        def get_peer_addresses():
-            return ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues())
-        
-        def get_uptime():
-            return json.dumps(time.time() - start_time)
+        get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, current_work.value['best_share_hash'], current_work.value['bits'].target, current_work2.value['subsidy'], net)
         
-        class WebInterface(resource.Resource):
-            def __init__(self, func, mime_type, *fields):
-                self.func, self.mime_type, self.fields = func, mime_type, fields
-            
-            def render_GET(self, request):
-                request.setHeader('Content-Type', self.mime_type)
-                request.setHeader('Access-Control-Allow-Origin', '*')
-                return self.func(*(request.args[field][0] for field in self.fields))
-        
-        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
-        web_root.putChild('users', WebInterface(get_users, 'application/json'))
-        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
-        web_root.putChild('current_payouts', WebInterface(get_current_payouts, 'application/json'))
-        web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain', 'total'))
-        web_root.putChild('global_stats', WebInterface(get_global_stats, 'application/json'))
-        web_root.putChild('local_stats', WebInterface(get_local_stats, 'application/json'))
-        web_root.putChild('peer_addresses', WebInterface(get_peer_addresses, 'text/plain'))
-        web_root.putChild('payout_addr', WebInterface(lambda: json.dumps(bitcoin_data.script2_to_human(my_script, net.PARENT)), 'application/json'))
-        web_root.putChild('recent_blocks', WebInterface(lambda: json.dumps(recent_blocks), 'application/json'))
-        web_root.putChild('uptime', WebInterface(get_uptime, 'application/json'))
-        if draw is not None:
-            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
-        
-        new_root = resource.Resource()
-        web_root.putChild('web', new_root)
-        
-        stat_log = []
-        if os.path.exists(os.path.join(datadir_path, 'stats')):
-            try:
-                with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
-                    stat_log = json.loads(f.read())
-            except:
-                log.err(None, 'Error loading stats:')
-        def update_stat_log():
-            while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
-                stat_log.pop(0)
-            
-            lookbehind = 3600//net.SHARE_PERIOD
-            if tracker.get_height(current_work.value['best_share_hash']) < lookbehind:
-                return None
-            
-            global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind)
-            (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts()
-            
-            miner_hash_rates = {}
-            miner_dead_hash_rates = {}
-            datums, dt = local_rate_monitor.get_datums_in_last()
-            for datum in datums:
-                miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
-                if datum['dead']:
-                    miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
-            
-            stat_log.append(dict(
-                time=time.time(),
-                pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop),
-                pool_stale_prop=global_stale_prop,
-                local_hash_rates=miner_hash_rates,
-                local_dead_hash_rates=miner_dead_hash_rates,
-                shares=shares,
-                stale_shares=stale_orphan_shares + stale_doa_shares,
-                stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
-                current_payout=get_current_txouts().get(my_script, 0)*1e-8,
-            ))
-            
-            with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
-                f.write(json.dumps(stat_log))
-        task.LoopingCall(update_stat_log).start(5*60)
-        new_root.putChild('log', WebInterface(lambda: json.dumps(stat_log), 'application/json'))
+        web_root = web.get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, args.worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received)
+        worker_interface.WorkerInterface(WorkerBridge()).attach_to(web_root)
         
-        grapher = graphs.Grapher(os.path.join(datadir_path, 'rrd'))
-        web_root.putChild('graphs', grapher.get_resource())
-        def add_point():
-            if tracker.get_height(current_work.value['best_share_hash']) < 720:
-                return
-            nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)
-            poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720))
-            grapher.add_poolrate_point(poolrate, poolrate - nonstalerate)
-        task.LoopingCall(add_point).start(100)
+        deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
         
-        def attempt_listen():
-            try:
-                reactor.listenTCP(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
-            except error.CannotListenError, e:
-                print >>sys.stderr, 'Error binding to worker port: %s. Retrying in 1 second.' % (e.socketError,)
-                reactor.callLater(1, attempt_listen)
-            else:
-                with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
-                    pass
-        attempt_listen()
+        with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
+            pass
         
         print '    ...success!'
         print
@@ -904,20 +684,26 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
         if args.irc_announce:
             from twisted.words.protocols import irc
             class IRCClient(irc.IRCClient):
-                nickname = 'p2pool_%s%i' % (net.NAME, random.randrange(100))
+                nickname = 'p2pool%02i' % (random.randrange(100),)
+                channel = '#p2pool' if net.NAME == 'bitcoin' else '#p2pool-alt'
                 def lineReceived(self, line):
                     print repr(line)
                     irc.IRCClient.lineReceived(self, line)
                 def signedOn(self):
                     irc.IRCClient.signedOn(self)
                     self.factory.resetDelay()
-                    self.join('#p2pool')
+                    self.join(self.channel)
                     self.watch_id = tracker.verified.added.watch(self._new_share)
                     self.announced_hashes = set()
+                    self.delayed_messages = {}
+                def privmsg(self, user, channel, message):
+                    if channel == self.channel and message in self.delayed_messages:
+                        self.delayed_messages.pop(message).cancel()
                 def _new_share(self, share):
                     if share.pow_hash <= share.header['bits'].target and share.header_hash not in self.announced_hashes and abs(share.timestamp - time.time()) < 10*60:
                         self.announced_hashes.add(share.header_hash)
-                        self.say('#p2pool', '\x02BLOCK FOUND by %s! %s%064x' % (bitcoin_data.script2_to_address(share.share_data['new_script'], net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash))
+                        message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
+                        self.delayed_messages[message] = reactor.callLater(random.expovariate(1/60), lambda: (self.say(self.channel, message), self.delayed_messages.pop(message)))
                 def connectionLost(self, reason):
                     tracker.verified.added.unwatch(self.watch_id)
                     print 'IRC connection lost:', reason.getErrorMessage()
@@ -950,7 +736,7 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                         math.format(int(my_att_s)),
                         math.format_dt(dt),
                         math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
-                        math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].target / my_att_s) if my_att_s else '???',
+                        math.format_dt(2**256 / tracker.shares[current_work.value['best_share_hash']].max_target / my_att_s) if my_att_s and current_work.value['best_share_hash'] else '???',
                     )
                     
                     if height > 2:
@@ -962,7 +748,7 @@ def main(args, net, datadir_path, merged_urls, worker_endpoint):
                             shares, stale_orphan_shares, stale_doa_shares,
                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                             math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
-                            get_current_txouts().get(my_script, 0)*1e-8, net.PARENT.SYMBOL,
+                            get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
                         )
                         this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                             math.format(int(real_att_s)),
@@ -1119,6 +905,8 @@ def run():
             ]:
                 if getattr(args, var_name) is None and cp.has_option('x', conf_name):
                     setattr(args, var_name, var_type(cp.get('x', conf_name)))
+        if args.bitcoind_rpc_password is None:
+            parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
     
     if args.bitcoind_rpc_username is None:
         args.bitcoind_rpc_username = ''