store shares to disk and load them on startup
[p2pool.git] / p2pool / main.py
1 #!/usr/bin/python
2
3 from __future__ import division
4
5 import argparse
6 import datetime
7 import itertools
8 import os
9 import random
10 import sqlite3
11 import struct
12 import sys
13 import time
14 import json
15 import signal
16
17 from twisted.internet import defer, reactor
18 from twisted.web import server, resource
19 from twisted.python import log
20 from nattraverso import portmapper, ipdiscover
21
22 import bitcoin.p2p, bitcoin.getwork, bitcoin.data
23 from util import db, expiring_dict, jsonrpc, variable, deferral, math
24 from . import p2p, worker_interface, skiplists
25 import p2pool.data as p2pool
26 import p2pool as p2pool_init
27
@deferral.retry('Error getting work from bitcoind:', 3)
@defer.inlineCallbacks
def getwork(bitcoind):
    """Fetch current work and block height from bitcoind over JSON-RPC.

    Fires both RPCs at once and returns (BlockAttempt, height). Retried
    every 3 seconds on failure by the deferral.retry decorator.
    """
    # issue both requests concurrently; a block could arrive in between them
    work_deferred = bitcoind.rpc_getwork()
    height_deferred = bitcoind.rpc_getblocknumber()
    try:
        work = bitcoin.getwork.BlockAttempt.from_getwork((yield work_deferred))
        height = yield height_deferred
    finally:
        # swallow any residual errors so twisted doesn't log them as unhandled
        work_deferred.addErrback(lambda fail: None)
        height_deferred.addErrback(lambda fail: None)
    defer.returnValue((work, height))
40
@deferral.retry('Error getting payout script from bitcoind:', 1)
@defer.inlineCallbacks
def get_payout_script(factory):
    """Request a payout script from bitcoind via a checkorder IP transaction.

    Returns the script on success, None when bitcoind denies the request,
    and raises ValueError on any other reply.
    """
    protocol = yield factory.getProtocol()
    res = yield protocol.check_order(order=bitcoin.p2p.Protocol.null_order)
    reply = res['reply']
    if reply == 'success':
        defer.returnValue(res['script'])
    if reply == 'denied':
        defer.returnValue(None)
    raise ValueError('Unexpected reply: %r' % (res,))
51
@deferral.retry('Error creating payout script:', 10)
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    """Fallback payout script: ask bitcoind for the 'p2pool' account address
    over JSON-RPC and convert it into a scriptPubKey for the given network."""
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    pubkey_hash = bitcoin.data.address_to_pubkey_hash(address, net)
    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(pubkey_hash))
56
@defer.inlineCallbacks
def main(args):
    """Run the p2pool node.

    Checks bitcoind RPC and P2P connectivity, loads stored shares from disk,
    joins the p2pool P2P network, serves getwork to miners over JSON-RPC/HTTP,
    and then loops forever printing pool statistics. Must be invoked under a
    running twisted reactor (see run()); any fatal error stops the reactor.

    args: the argparse namespace produced by run().
    """
    try:
        if args.charts:
            from . import draw  # optional dependency (PIL/pygame), only imported on demand
        
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        temp_work, temp_height = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        if args.pubkey_hash is None:
            if my_script is None:
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            # an explicit -a/--address on the command line overrides both methods
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        print 'Loading cached block headers...'
        ht = bitcoin.p2p.HeightTracker(factory, args.net.HEADERSTORE_FILENAME)
        print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
        print
        
        tracker = p2pool.OkayTracker(args.net)
        # the share store lives next to the script so a restart resumes from disk
        ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.SHARESTORE_FILENAME), args.net)
        print "Loading shares..."
        for i, share in enumerate(ss.get_shares()):
            if share.hash in tracker.shares:
                continue
            # mark loaded shares so they are neither re-broadcast nor re-saved
            share.shared = True
            share.stored = True
            tracker.add(share, known_verified=True)
            if len(tracker.shares) % 1000 == 0 and tracker.shares:
                print "    %i" % (len(tracker.shares),)
        print "    ...done!"
        print
        # from now on, persist every share that passes verification
        tracker.verified.added.watch(ss.add_share)
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        # share_hash -> (time last requested, request count); entries expire after 300s
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            """Poll bitcoind for fresh work and publish it; chains into
            set_real_work2() when the previous block changed."""
            work, height = yield getwork(bitcoind)
            changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                clock_offset=time.time() - work.timestamp,
            ))
            if changed:
                set_real_work2()
        
        def set_real_work2():
            """Recompute the best share to mine on and request missing parent
            shares from peers believed to have them."""
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                # exponential backoff: wait 10 * 1.5**count seconds between re-requests
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    # occasionally fall back to the originating peer to avoid herding
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        print
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            """Broadcast a share to every connected peer except ignore_peer,
            then mark it as shared."""
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            """Handle shares received from the network (or found locally):
            add them to the tracker, submit any that solve a bitcoin block,
            and re-evaluate the best chain if anything was new."""
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                some_new = True
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                # a share that also meets the bitcoin target is a full block
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        def p2p_share_hashes(share_hashes, peer):
            """Handle advertised share hashes: request the ones we don't have,
            with the same backoff bookkeeping used by set_real_work2()."""
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            """Serve a peer's getshares request: walk each requested chain up
            to `parents` ancestors, stopping at any hash in `stops`."""
            parents = min(parents, 1000//len(share_hashes))  # cap total response size
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            """Parse 'host' or 'host:port' into an (ip, port) tuple, defaulting
            to the network's standard P2P port."""
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        # builtin bootstrap nodes, extended with DNS-resolved hosts below
        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            """On best-chain change, broadcast our not-yet-shared shares on it."""
            #print 'Work changed:', new_work
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            """Periodically (re-)register a UPnP port mapping for the P2P port."""
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if not is_lan:
                        continue
                    pm = yield portmapper.get_port_mapper()
                    yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
                except:
                    if p2pool_init.DEBUG:
                        log.err(None, "UPnP error:")
                # retry after ~120s on average
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        # random per-run tag embedded in our coinbase nonces, used to recognize our own shares
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        def compute(state, payout_script):
            """Build a getwork BlockAttempt for a miner: assemble extra
            transactions, generate the p2pool coinbase transaction, and record
            the merkle root -> transactions mapping for got_response()."""
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            extra_txs = []
            size = 0
            # greedily take transactions until the 500000-byte size cap
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                # timestamp must exceed the median of the last 11 ancestor shares
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            # remember when this work was handed out, keyed by its coinbase nonce
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        my_shares = set()  # hashes of shares found by this node
        times = {}  # coinbase nonce -> time work was handed out, for age reporting
        
        def got_response(data):
            """Handle a getwork submission from a miner. Returns True iff the
            submitted share builds on the current best share."""
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                    if hash_ <= block['header']['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                        print
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Received invalid share from worker - %x/%x' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        def get_rate():
            """JSON endpoint: pool attempts/second over up to 720 recent shares."""
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
                return json.dumps(att_s)
            return json.dumps(None)
        
        def get_users():
            """JSON endpoint: payout weight fraction per user on the current chain."""
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            """Minimal twisted web resource serving func() with a fixed MIME type."""
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        if args.charts:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        class Tx(object):
            """A mempool transaction plus bookkeeping about whether all of its
            inputs are already confirmed, used when picking extra transactions
            to include in generated blocks."""
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                """Asynchronously verify every input's parent tx is in a block,
                accumulating value_in along the way; bails out on any failure,
                leaving parents_all_in_blocks False."""
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                # NOTE(review): is_good2 is not defined anywhere in this file -
                # presumably provided elsewhere; verify before relying on this path
                x = self.is_good2()
                #print 'is_good:', x
                return x
        
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            """Fetch an announced transaction from bitcoind and add it to tx_pool."""
            try:
                assert isinstance(tx_hash, (int, long))
                #print 'REQUESTING', tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print 'GOT', tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                log.err(None, 'Error handling tx:')
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            """A new bitcoin block was announced: wake up the work poller."""
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(set_real_work2)
        
        @defer.inlineCallbacks
        def work1_thread():
            """Refresh bitcoind work every ~20s on average, or as soon as
            work_updated fires."""
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            """Re-evaluate the best share chain every ~20s on average."""
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
        counter = skiplists.CountsSkipList(tracker, run_identifier)
        
        # status loop: print pool statistics every 3 seconds, forever
        while True:
            yield deferral.sleep(3)
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 2:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 120))
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        matching_in_chain = counter(current_work.value['best_share_hash'], height)
                        shares_in_chain = my_shares & matching_in_chain
                        stale_shares = my_shares - matching_in_chain
                        print 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                            math.format(att_s),
                            height,
                            len(tracker.verified.shares),
                            len(tracker.shares),
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            len(shares_in_chain) + len(stale_shares),
                            len(stale_shares),
                            len(p2p_node.peers),
                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
                        #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
                        #for k, v in weights.iteritems():
                        #    print k.encode('hex'), v/total_weight
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()
568
def run():
    """Program entry point: parse command-line arguments, optionally set up
    debug logging (timestamped, teed to debug.log, rotatable via SIGUSR1),
    resolve port/address defaults, and start main() under the twisted reactor."""
    parser = argparse.ArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,))
    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
    parser.add_argument('--testnet',
        help='use the testnet',
        action='store_const', const=p2pool.Testnet, default=p2pool.Mainnet, dest='net')
    parser.add_argument('--debug',
        help='debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate to this address (defaults to requesting one from bitcoind)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--charts',
        help='generate charts on the web interface (requires PIL and pygame)',
        action='store_const', const=True, default=False, dest='charts')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on TCP port PORT (defaults to 9333 normally, 19333 for testnet), in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('-l', '--low-bandwidth',
        help='trade lower bandwidth usage for higher latency (reduced efficiency)',
        action='store_true', default=False, dest='low_bandwidth')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to forward port 9333 (19333 for testnet) from the WAN to this computer using UPnP''',
        action='store_false', default=True, dest='upnp')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 9332)',
        type=int, action='store', default=9332, dest='worker_port')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to a bitcoind at this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332)',
        type=int, action='store', default=8332, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 normally. 18333 for testnet)',
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    # positional arguments: RPC credentials
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_USERNAME',
        help='bitcoind RPC interface username',
        type=str, action='store', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPC_PASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool_init.DEBUG = True
        class ReopeningFile(object):
            """File-like wrapper whose underlying file can be closed and
            reopened in place (used for log rotation on SIGUSR1)."""
            def __init__(self, *open_args, **open_kwargs):
                self.open_args, self.open_kwargs = open_args, open_kwargs
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def reopen(self):
                self.inner_file.close()
                self.inner_file = open(*self.open_args, **self.open_kwargs)
            def write(self, data):
                self.inner_file.write(data)
            def flush(self):
                self.inner_file.flush()
        class TeePipe(object):
            """File-like object that duplicates writes to several outputs."""
            def __init__(self, outputs):
                self.outputs = outputs
            def write(self, data):
                for output in self.outputs:
                    output.write(data)
            def flush(self):
                for output in self.outputs:
                    output.flush()
        class TimestampingPipe(object):
            """File-like wrapper that prefixes each complete line with an
            HH:MM:SS.microseconds timestamp, buffering any partial line."""
            def __init__(self, inner_file):
                self.inner_file = inner_file
                self.buf = ''
                # attribute consulted by the Python 2 print statement
                self.softspace = 0
            def write(self, data):
                buf = self.buf + data
                lines = buf.split('\n')
                for line in lines[:-1]:
                    self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                    self.inner_file.flush()
                self.buf = lines[-1]
            def flush(self):
                pass
        logfile = ReopeningFile(os.path.join(os.path.dirname(sys.argv[0]), 'debug.log'), 'w')
        # mirror all output (including twisted's log) to console and debug.log, timestamped
        sys.stdout = sys.stderr = log.DefaultObserver.stderr = TimestampingPipe(TeePipe([sys.stderr, logfile]))
        if hasattr(signal, "SIGUSR1"):
            def sigusr1(signum, frame):
                """SIGUSR1 handler: reopen debug.log so external rotation works."""
                print '''Caught SIGUSR1, closing 'debug.log'...'''
                logfile.reopen()
                print '''...and reopened 'debug.log' after catching SIGUSR1.'''
            signal.signal(signal.SIGUSR1, sigusr1)
    
    # fill in network-dependent port defaults left as None by argparse
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = args.net.P2P_PORT
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
        except Exception, e:
            raise ValueError('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    reactor.callWhenRunning(main, args)
    reactor.run()
684     reactor.run()