Save all cache files under data/
diff --git a/p2pool/main.py b/p2pool/main.py
index 57b9c04..dd32c60 100644
--- a/p2pool/main.py
+++ b/p2pool/main.py
@@ -1,4 +1,5 @@
 #!/usr/bin/python
+# coding=utf-8
 
 from __future__ import division
 
@@ -7,7 +8,6 @@ import datetime
 import itertools
 import os
 import random
-import sqlite3
 import struct
 import sys
 import time
@@ -22,7 +22,7 @@ from nattraverso import portmapper, ipdiscover
 
 import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
 from bitcoin import worker_interface
-from util import db, expiring_dict, jsonrpc, variable, deferral, math
+from util import expiring_dict, jsonrpc, variable, deferral, math
 from . import p2p, skiplists, networks
 import p2pool, p2pool.data as p2pool_data
 
@@ -43,16 +43,15 @@ def getwork(bitcoind):
 @defer.inlineCallbacks
 def get_payout_script2(bitcoind, net):
     address = yield bitcoind.rpc_getaccountaddress('p2pool')
-    try:
-        pubkey = (yield bitcoind.rpc_validateaddress(address))['pubkey'].decode('hex')
-    except:
-        log.err()
+    validate_response = yield bitcoind.rpc_validateaddress(address)
+    if 'pubkey' not in validate_response:
+        print '    Pubkey request failed. Falling back to payout to address.'
         defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
-    else:
-        defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))
+    pubkey = validate_response['pubkey'].decode('hex')
+    defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))
 
 @defer.inlineCallbacks
-def main(args, net):
+def main(args, net, datadir_path):
     try:
         print 'p2pool (version %s)' % (p2pool.__version__,)
         print
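
The rewritten get_payout_script2 leans on the fact that Twisted's defer.returnValue() raises internally, so the fallback branch really does end the function and the pubkey path below it only runs when 'pubkey' is present. A minimal sketch of that control flow, with hypothetical names:

from twisted.internet import defer

@defer.inlineCallbacks
def pick_script(validate_deferred):
    response = yield validate_deferred           # e.g. result of rpc_validateaddress
    if 'pubkey' not in response:
        defer.returnValue('fallback script')     # raises, so the generator stops here
    defer.returnValue(response['pubkey'].decode('hex'))
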
@@ -94,14 +93,11 @@ def main(args, net):
         print '    Payout script:', bitcoin_data.script2_to_human(my_script, net)
         print
         
-        print 'Loading cached block headers...'
-        ht = bitcoin_p2p.HeightTracker(factory, net.NAME + '_headers.dat')
-        print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
-        print
+        ht = bitcoin_p2p.HeightTracker(bitcoind, factory)
         
         tracker = p2pool_data.OkayTracker(net)
         shared_share_hashes = set()
-        ss = p2pool_data.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_shares.'), net)
+        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
         known_verified = set()
         print "Loading shares..."
         for i, (mode, contents) in enumerate(ss.get_shares()):
@@ -125,14 +121,15 @@ def main(args, net):
             tracker.verified.add(tracker.shares[h])
         print "    ...done loading %i shares!" % (len(tracker.shares),)
         print
-        tracker.added.watch(lambda share: ss.add_share(share))
-        tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
         tracker.removed.watch(lambda share: ss.forget_share(share.hash))
         tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
         tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
         
         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
         
+        pre_current_work = variable.Variable(None)
+        pre_current_work2 = variable.Variable(None)
+        pre_merged_work = variable.Variable(None)
         # information affecting work that should trigger a long-polling update
         current_work = variable.Variable(None)
         # information affecting work that should not trigger a long-polling update
@@ -145,29 +142,26 @@ def main(args, net):
         @defer.inlineCallbacks
         def set_real_work1():
             work = yield getwork(bitcoind)
-            changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
-            current_work.set(dict(
-                version=work['version'],
-                previous_block=work['previous_block_hash'],
-                target=work['target'],
-                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
-                aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
-            ))
-            current_work2.set(dict(
+            pre_current_work2.set(dict(
                 time=work['time'],
                 transactions=work['transactions'],
                 subsidy=work['subsidy'],
                 clock_offset=time.time() - work['time'],
                 last_update=time.time(),
+            )) # second set first because everything hooks on the first
+            pre_current_work.set(dict(
+                version=work['version'],
+                previous_block=work['previous_block_hash'],
+                target=work['target'],
             ))
-            if changed:
-                set_real_work2()
         
         def set_real_work2():
-            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
+            best, desired = tracker.think(ht, pre_current_work.value['previous_block'], time.time() - pre_current_work2.value['clock_offset'])
             
-            t = dict(current_work.value)
+            current_work2.set(pre_current_work2.value)
+            t = dict(pre_current_work.value)
             t['best_share_hash'] = best
+            t['aux_work'] = pre_merged_work.value
             current_work.set(t)
             
             t = time.time()
@@ -197,28 +191,28 @@ def main(args, net):
                     ))[:100],
                 )
                 requested[share_hash] = t, count + 1
+        pre_current_work.changed.watch(lambda _: set_real_work2())
         
         print 'Initializing work...'
         yield set_real_work1()
-        set_real_work2()
         print '    ...success!'
         print
         
+        pre_merged_work.changed.watch(lambda _: set_real_work2())
+        ht.updated.watch(set_real_work2)
+        
         @defer.inlineCallbacks
         def set_merged_work():
             if not args.merged_url:
                 return
+            merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
             while True:
-                merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                 auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
-                x = dict(current_work.value)
-                x['aux_work'] = dict(
+                pre_merged_work.set(dict(
                     hash=int(auxblock['hash'], 16),
                     target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
                     chain_id=auxblock['chainid'],
-                )
-                #print x['aux_work']
-                current_work.set(x)
+                ))
                 yield deferral.sleep(1)
         set_merged_work()
         
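
The work-polling path is now split across three "pre" Variables: set_real_work1 only writes raw bitcoind data into pre_current_work/pre_current_work2, set_merged_work writes into pre_merged_work, and the watchers registered above call set_real_work2 to merge everything into current_work, which is what long-polling observes. A minimal sketch of that Variable/watch pattern, assuming util.variable.Variable exposes .value, .set() and a .changed event with .watch():

from util import variable

pre_work = variable.Variable(None)    # raw getwork data, set by the poller
work = variable.Variable(None)        # merged view that long-polls watch

def merge(_):
    # combine the raw pieces into the value everyone else consumes
    work.set(dict(pre_work.value, best_share_hash=None))

pre_work.changed.watch(merge)
pre_work.set(dict(previous_block=0, target=2**256 - 1))  # fires merge()
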
@@ -315,15 +309,25 @@ def main(args, net):
                 nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
             except:
                 log.err(None, 'Error resolving bootstrap node IP:')
-
+        
         if net.NAME == 'litecoin':
             nodes.add(((yield reactor.resolve('liteco.in')), net.P2P_PORT))
         
+        addrs = {}
+        try:
+            addrs = dict(eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt')))
+        except:
+            print "error reading addrs"
+        
+        def save_addrs():
+            open(os.path.join(datadir_path, 'addrs.txt'), 'w').writelines(repr(x) + '\n' for x in addrs.iteritems())
+        task.LoopingCall(save_addrs).start(60)
+        
         p2p_node = p2p.Node(
             current_work=current_work,
             port=args.p2pool_port,
             net=net,
-            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), net.NAME),
+            addr_store=addrs,
             preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
         )
         p2p_node.handle_shares = p2p_shares
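
The peer-address cache moves from an SQLite database next to the script to data/<net>/addrs.txt, a plain dict serialized one repr'd (key, value) pair per line and rewritten every 60 seconds. A minimal sketch of that round-trip; the (services, first_seen, last_seen) value layout is an illustrative assumption:

def load_addrs(path):
    try:
        return dict(eval(line) for line in open(path))
    except (IOError, SyntaxError):
        return {}

def save_addrs(path, addrs):
    open(path, 'w').writelines(repr(item) + '\n' for item in addrs.iteritems())

addrs = {('198.51.100.7', 9333): (1, 1316000000.0, 1316000500.0)}
save_addrs('addrs.txt', addrs)
assert load_addrs('addrs.txt') == addrs
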
@@ -347,6 +351,13 @@ def main(args, net):
         
         current_work.changed.watch(work_changed)
         
+        def save_shares():
+            for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 2*net.CHAIN_LENGTH):
+                ss.add_share(share)
+                if share.hash in tracker.verified.shares:
+                    ss.add_verified_hash(share.hash)
+        task.LoopingCall(save_shares).start(60)
+        
         print '    ...success!'
         print
         
@@ -406,38 +417,25 @@ def main(args, net):
         
         def compute(request):
             state = current_work.value
+            
             payout_script = get_payout_script_from_username(request)
             if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                 payout_script = my_script
-            if state['best_share_hash'] is None and net.PERSIST:
-                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
+            
             if len(p2p_node.peers) == 0 and net.PERSIST:
                 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
+            if state['best_share_hash'] is None and net.PERSIST:
+                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
             if time.time() > current_work2.value['last_update'] + 60:
                 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
             
-            if state['aux_work'] is not None:
-                aux_str = '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
-            else:
-                aux_str = ''
-            
-            # XXX assuming generate_tx is smallish here..
-            def get_stale_frac():
-                shares, stale_shares = get_share_counts()
-                if shares == 0:
-                    return ""
-                frac = stale_shares/shares
-                return 2*struct.pack('<H', int(65535*frac + .5))
+            previous_share = None if state['best_share_hash'] is None else tracker.shares[state['best_share_hash']]
             subsidy = current_work2.value['subsidy']
-            
-            
-            timestamp = current_work2.value['time']
-            previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
-            new_share_info, generate_tx = p2pool_data.new_generate_transaction(
+            share_info, generate_tx = p2pool_data.generate_transaction(
                 tracker=tracker,
-                new_share_data=dict(
+                share_data=dict(
                     previous_share_hash=state['best_share_hash'],
-                    coinbase=aux_str,
+                    coinbase='' if state['aux_work'] is None else '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0),
                     nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                     new_script=payout_script,
                     subsidy=subsidy,
@@ -451,21 +449,21 @@ def main(args, net):
                 net=net,
             )
             
-            print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin_data.target_to_difficulty(new_share_info['target']), (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) -subsidy//200)*1e-8, net.BITCOIN_SYMBOL, subsidy*1e-8, net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
-            #print 'Target: %x' % (p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
-            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
+            print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
+                bitcoin_data.target_to_difficulty(share_info['target']),
+                (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) - subsidy//200)*1e-8, net.BITCOIN_SYMBOL,
+                subsidy*1e-8, net.BITCOIN_SYMBOL,
+                len(current_work2.value['transactions']),
+            )
+            
             transactions = [generate_tx] + list(current_work2.value['transactions'])
             merkle_root = bitcoin_data.merkle_hash(transactions)
-            merkle_root_to_transactions[merkle_root] = new_share_info, transactions
+            merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time()
             
-            target2 = new_share_info['target']
-            times[merkle_root] = time.time()
-            #print 'SENT', 2**256//p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
-            return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2), state['best_share_hash']
+            return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, current_work2.value['time'], state['target'], share_info['target']), state['best_share_hash']
         
         my_shares = set()
         doa_shares = set()
-        times = {}
         
         def got_response(header, request):
             try:
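
compute() now stashes the getwork issue time alongside the share data in merkle_root_to_transactions, replacing the separate times dict, so got_response can recover everything about a returned header from a single lookup. A small sketch with illustrative names:

import time

merkle_root_to_transactions = {}

def remember_work(merkle_root, share_info, transactions):
    merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time()

def work_age(merkle_root):
    entry = merkle_root_to_transactions.get(merkle_root)
    if entry is None:
        return None  # work issued before the last restart
    share_info, transactions, getwork_time = entry
    return time.time() - getwork_time
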
@@ -475,8 +473,7 @@ def main(args, net):
                 if xxx is None:
                     print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                     return False
-                share_info, transactions = xxx
-                new_share_info = share_info
+                share_info, transactions, getwork_time = xxx
                 
                 hash_ = bitcoin_data.block_header_type.hash256(header)
                 
@@ -515,15 +512,15 @@ def main(args, net):
                     except:
                         log.err(None, 'Error while processing merged mining POW:')
                 
-                target = new_share_info['target']
+                target = share_info['target']
                 if pow_hash > target:
                     print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
                     return False
-                share = p2pool_data.NewShare(net, header, new_share_info, other_txs=transactions[1:])
+                share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
                 my_shares.add(share.hash)
                 if share.previous_hash != current_work.value['best_share_hash']:
                     doa_shares.add(share.hash)
-                print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
+                print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - getwork_time) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                 good = share.previous_hash == current_work.value['best_share_hash']
                 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                 p2p_shares([share])
@@ -538,7 +535,7 @@ def main(args, net):
         def get_rate():
             if current_work.value['best_share_hash'] is not None:
                 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
-                att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], net, min(height - 1, 720))
+                att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                 fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
                 return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
             return json.dumps(None)
@@ -581,8 +578,6 @@ def main(args, net):
         print 'Started successfully!'
         print
         
-        ht.updated.watch(set_real_work2)
-        
         @defer.inlineCallbacks
         def work1_thread():
             while True:
@@ -614,8 +609,8 @@ def main(args, net):
             signal.signal(signal.SIGALRM, watchdog_handler)
             task.LoopingCall(signal.alarm, 30).start(1)
         
-        
-        pool_str = None;
+        last_str = None
+        last_time = 0
         while True:
             yield deferral.sleep(3)
             try:
@@ -624,12 +619,12 @@ def main(args, net):
                 if current_work.value['best_share_hash'] is not None:
                     height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                     if height > 2:
-                        att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], net, min(height - 1, 720))
+                        att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                         weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                         shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
                         stale_shares = stale_doa_shares + stale_not_doa_shares
                         fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
-                        str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
+                        this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
                             math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
                             height,
                             len(tracker.verified.shares),
@@ -641,16 +636,19 @@ def main(args, net):
                             stale_doa_shares,
                             len(p2p_node.peers),
                         ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
-                        if (str != pool_str):
-                            print str;
-                            pool_str = str;
                         if fracs:
                             med = math.median(fracs)
-                            print 'Median stale proportion:', med
+                            this_str += '\nPool stales: %i%%' % (int(100*med+.5),)
+                            conf = 0.9
                             if shares:
-                                print '    Own:', stale_shares/shares
+                                this_str += ' Own: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius(math.binomial_conf_interval(stale_shares, shares, conf)))
                                 if med < .99:
-                                    print '    Own efficiency: %.02f%%' % (100*(1 - stale_shares/shares)/(1 - med),)
+                                    this_str += ' Own efficiency: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius((1 - y)/(1 - med) for y in math.binomial_conf_interval(stale_shares, shares, conf)[::-1]))
+                                this_str += ' (%i%% confidence)' % (int(100*conf+.5),)
+                        if this_str != last_str or time.time() > last_time + 15:
+                            print this_str
+                            last_str = this_str
+                            last_time = time.time()
             
             
             except:
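
The status line now reports the local stale rate as a confidence interval instead of a bare ratio; the ± literal is also what the # coding=utf-8 declaration added at the top of the file is needed for under Python 2. A rough sketch of how the "Own: x±y%" figure comes out, assuming math.binomial_conf_interval(k, n, conf) returns a (low, high) interval for the true stale fraction and math.interval_to_center_radius() turns it into (center, radius):

# coding=utf-8
from util import math

stale_shares, shares, conf = 3, 40, 0.9
interval = math.binomial_conf_interval(stale_shares, shares, conf)
center, radius = math.interval_to_center_radius(interval)
print 'Own: %i±%i%% (%i%% confidence)' % (
    int(100*center + .5), int(100*radius + .5), int(100*conf + .5))
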
@@ -698,7 +696,7 @@ def run():
     parser.add_argument('--version', action='version', version=p2pool.__version__)
     parser.add_argument('--net',
         help='use specified network (default: bitcoin)',
-        action='store', choices=sorted(x for x in networks.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
+        action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
     parser.add_argument('--testnet',
         help='''use the network's testnet''',
         action='store_const', const=True, default=False, dest='testnet')
@@ -723,21 +721,21 @@ def run():
     
     p2pool_group = parser.add_argument_group('p2pool interface')
     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
-        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
+        help='use TCP port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
         type=int, action='store', default=None, dest='p2pool_port')
     p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
-        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
+        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to default p2pool P2P port), in addition to builtin addresses',
         type=str, action='append', default=[], dest='p2pool_nodes')
     parser.add_argument('--disable-upnp',
-        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
+        help='''don't attempt to forward p2pool P2P port from the WAN to this computer using UPnP''',
         action='store_false', default=True, dest='upnp')
     
     worker_group = parser.add_argument_group('worker interface')
     worker_group.add_argument('-w', '--worker-port', metavar='PORT',
-        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: bitcoin: 9332 namecoin: 9331 ixcoin: 9330 i0coin: 9329 solidcoin: 9328 litecoin: 9327, +10000 for testnets)',
+        help='listen on PORT for RPC connections from miners asking for work and providing responses (default:%s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
         type=int, action='store', default=None, dest='worker_port')
     worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
-        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:9332/fee . default: 0''',
+        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee . default: 0''',
         type=float, action='store', default=0, dest='worker_fee')
     
     bitcoind_group = parser.add_argument_group('bitcoind interface')
@@ -745,10 +743,10 @@ def run():
         help='connect to a bitcoind at this address (default: 127.0.0.1)',
         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
-        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getmemorypool (default: 8332 ixcoin: 8338 i0coin: 7332 litecoin: 9332)',
+        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getmemorypool (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_RPC_PORT) for _, n in sorted(networks.realnets.items())),
         type=int, action='store', default=None, dest='bitcoind_rpc_port')
     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
-        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 namecoin: 8334 ixcoin: 8337 i0coin: 7333 solidcoin: 7555 litecoin: 9333, +10000 for testnets)',
+        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_P2P_PORT) for _, n in sorted(networks.realnets.items())),
         type=int, action='store', default=None, dest='bitcoind_p2p_port')
     
     bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
@@ -765,8 +763,12 @@ def run():
     
     net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]
     
+    datadir_path = os.path.join(os.path.dirname(sys.argv[0]), 'data', net.NAME)
+    if not os.path.exists(datadir_path):
+        os.makedirs(datadir_path)
+    
     if args.logfile is None:
-        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '.log')
+        args.logfile = os.path.join(datadir_path, 'log')
     
     class LogFile(object):
         def __init__(self, filename):
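
Putting it together, everything the node caches now lives under data/<network name>/ next to the script, created on startup, and the default log file moves there as well. A small sketch of the resulting layout for the bitcoin network:

import os, sys

datadir_path = os.path.join(os.path.dirname(sys.argv[0]), 'data', 'bitcoin')
if not os.path.exists(datadir_path):
    os.makedirs(datadir_path)

print os.path.join(datadir_path, 'addrs.txt')  # peer address cache
print os.path.join(datadir_path, 'shares.')    # prefix used by the ShareStore
print os.path.join(datadir_path, 'log')        # default --logfile location
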
@@ -851,5 +853,5 @@ def run():
     if (args.merged_url is None) ^ (args.merged_userpass is None):
         parser.error('must specify --merged-url and --merged-userpass')
     
-    reactor.callWhenRunning(main, args, net)
+    reactor.callWhenRunning(main, args, net, datadir_path)
     reactor.run()