1 from __future__ import division
6 from twisted.internet import defer, reactor
7 from twisted.python import failure
8 from twisted.trial import unittest
9 from twisted.web import client, resource, server
11 from p2pool import data, node, work
12 from p2pool.bitcoin import data as bitcoin_data, networks, worker_interface
13 from p2pool.util import deferral, jsonrpc, math, variable
class bitcoind(object): # can be used as p2p factory, p2p protocol, or rpc jsonrpc proxy
    # In-memory stand-in for a real bitcoind, pre-seeded with one known block.
        # blocks: block hashes oldest-first; the list index doubles as block height.
        self.blocks = [0x000000000000016c169477c25421250ec5d32cf9c6d38538b5de970a2355fd89]
        # headers: block hash -> header dict (fields as used by bitcoin_data.block_header_type)
        self.headers = {0x16c169477c25421250ec5d32cf9c6d38538b5de970a2355fd89: {
            'timestamp': 1351658517,
            'merkle_root': 2282849479936278423916707524932131168473430114569971665822757638339486597658L,
            'previous_block': 1048610514577342396345362905164852351970507722694242579238530L,
            'bits': bitcoin_data.FloatingInteger(bits=0x1a0513c5, target=0x513c50000000000000000000000000000000000000000000000L),
        # conn/new_* mimic the connection variable and event feeds node.Node expects
        self.conn = variable.Variable(self)
        self.new_headers = variable.Event()
        self.new_block = variable.Event()
        self.new_tx = variable.Event()
    # p2p factory/protocol stub methods -- bodies elided in this excerpt
    def getProtocol(self):
    def send_block(self, block):
    def send_tx(self, tx):
45 def get_block_header(self, block_hash):
46 return self.headers[block_hash]
53 def rpc_getblock(self, block_hash_hex):
54 block_hash = int(block_hash_hex, 16)
55 return dict(height=self.blocks.index(block_hash))
57 def __getattr__(self, name):
58 if name.startswith('rpc_'):
59 return lambda *args, **kwargs: failure.Failure(jsonrpc.Error_for_code(-32601)('Method not found'))
    def rpc_getblocktemplate(self, param):
        # Fake getblocktemplate (BIP 22-style): mode 'template' hands out work
        # (the dict at the bottom), mode 'submit' validates a serialized block
        # and, if acceptable, appends it to the chain.  Some lines are elided
        # in this excerpt.
        if param['mode'] == 'template':
        elif param['mode'] == 'submit':
            result = param['data']
            block = bitcoin_data.block_type.unpack(result.decode('hex'))
            # sanity check: coinbase outputs must equal the 50 BTC subsidy plus all collected fees
            if sum(tx_out['value'] for tx_out in block['txs'][0]['tx_outs']) != sum(tx['tx_outs'][0]['value'] for tx in block['txs'][1:]) + 5000000000:
            # reject stale submissions that do not build on the current tip
            if block['header']['previous_block'] != self.blocks[-1]:
            # proof-of-work must meet the target claimed in the block header
            if bitcoin_data.hash256(result.decode('hex')) > block['header']['bits'].target:
            header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
            self.blocks.append(header_hash)
            self.headers[header_hash] = block['header']
            # fire the notification on the next reactor tick, after this RPC returns
            reactor.callLater(0, self.new_block.happened)
            raise jsonrpc.Error_for_code(-1)('invalid request')
            # fake transaction: value == fee, padded script, hex-encoded like real GBT output
            data=bitcoin_data.tx_type.pack(dict(version=1, tx_ins=[], tx_outs=[dict(value=fee, script='hello!'*100)], lock_time=0)).encode('hex'),
            "previousblockhash" : '%064x' % (self.blocks[-1],),
            "flags" : "062f503253482f"
            "coinbasevalue" : 5000000000 + sum(tx['fee'] for tx in txs),
            "target" : "0000000000000513c50000000000000000000000000000000000000000000000",
            "mintime" : 1351655621,
            "noncerange" : "00000000ffffffff",
            "sigoplimit" : 20000,
            "sizelimit" : 1000000,
            "curtime" : 1351659940,
            # height of the block being built == current chain length
            "height" : len(self.blocks),
class mm_provider(object):
    # Fake merged-mining JSON-RPC provider.
    def __getattr__(self, name):
        # log any unexpected attribute/RPC probe to stdout (deliberately returns None)
        print '>>>>>>>', name
    def rpc_getauxblock(self, request, result1=None, result2=None):
        # called with results -> a submission; called bare -> hand out aux work
        if result1 is not None:
            print result1, result2
            # aux-work fields of the returned dict (some lines elided in this excerpt)
            "target" : "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", # 2**256*2/3
            "hash" : "2756ea0315d46dc3d8d974f34380873fc88863845ac01a658ef11bc3b368af52",
    # continuation of the test p2pool network definition (math.Object(...) call)
    PARENT=networks.nets['litecoin_testnet'],
    SHARE_PERIOD=3, # seconds
    CHAIN_LENGTH=20*60//3, # shares
    REAL_CHAIN_LENGTH=20*60//3, # shares
    TARGET_LOOKBEHIND=200, # shares
    IDENTIFIER='cca5e24ec6408b1e'.decode('hex'),
    PREFIX='ad9614f6466a39cf'.decode('hex'),
    # degenerate difficulty: every hash meets the target, so any getwork reply counts as a share
    MIN_TARGET=2**256 - 1,
    MAX_TARGET=2**256 - 1,
    BOOTSTRAP_ADDRS='72.14.191.28'.split(' '),
    ANNOUNCE_CHANNEL='#p2pool-alt',
    VERSION_CHECK=lambda v: True,
class MiniNode(object):
    # Self-contained p2pool node (tracker + p2p listener + worker web port) for
    # multi-node tests.  Several lines are elided in this excerpt.
    @defer.inlineCallbacks
    def start(cls, net, factory, bitcoind, peer_ports, merged_urls):
        # core p2pool node logic driven by the (fake) bitcoind
        self.n = node.Node(factory, bitcoind, [], [], net)
        # listen on an ephemeral port and dial every peer port we were given
        self.n.p2p_node = node.P2PNode(self.n, port=0, max_incoming_conns=1000000, addr_store={}, connect_addrs=[('127.0.0.1', peer_port) for peer_port in peer_ports])
        self.n.p2p_node.start()
        # random pubkey hash/donation so each MiniNode produces distinct shares
        wb = work.WorkerBridge(node=self.n, my_pubkey_hash=random.randrange(2**160), donation_percentage=random.uniform(0, 10), merged_urls=merged_urls, worker_fee=3)
        web_root = resource.Resource()
        worker_interface.WorkerInterface(wb).attach_to(web_root)
        self.web_port = reactor.listenTCP(0, server.Site(web_root))
        defer.returnValue(self)
    @defer.inlineCallbacks
        # teardown, in reverse order of start()
        yield self.web_port.stopListening()
        yield self.n.p2p_node.stop()
        del self.web_port, self.n
class Test(unittest.TestCase):
    @defer.inlineCallbacks
        # Single node against the fake bitcoind plus a merged-mining provider.
        mm_root = resource.Resource()
        # NOTE(review): mm_provider is passed as the class, not an instance --
        # confirm jsonrpc.HTTPServer accepts that here.
        mm_root.putChild('', jsonrpc.HTTPServer(mm_provider))
        mm_port = reactor.listenTCP(0, server.Site(mm_root))
        # bitd serves as both the p2p factory and the RPC proxy
        n = node.Node(bitd, bitd, [], [], mynet)
        wb = work.WorkerBridge(node=n, my_pubkey_hash=42, donation_percentage=2, merged_urls=[('http://127.0.0.1:%i' % (mm_port.getHost().port,), '')], worker_fee=3)
        web_root = resource.Resource()
        worker_interface.WorkerInterface(wb).attach_to(web_root)
        port = reactor.listenTCP(0, server.Site(web_root))
        proxy = jsonrpc.HTTPProxy('http://127.0.0.1:' + str(port.getHost().port))
        yield deferral.sleep(3)
        # mine 100 shares via the getwork interface (fetch work, submit it back)
        for i in xrange(100):
            blah = yield proxy.rpc_getwork()
            yield proxy.rpc_getwork(blah['data'])
        yield deferral.sleep(3)
        # all 100 shares must be tracked and verified
        assert len(n.tracker.items) == 100
        assert n.tracker.verified.get_height(n.best_share_var.value) == 100
        yield port.stopListening()
        del n, wb, web_root, port, proxy
        yield deferral.sleep(20) # waiting for work_poller to exit
        yield mm_port.stopListening()
    #test_node.timeout = 15
    @defer.inlineCallbacks
    def test_nodes(self):
        # Multi-node convergence test: each new MiniNode peers with all
        # previously started ones.  Some lines are elided in this excerpt.
            nodes.append((yield MiniNode.start(mynet, bitd, bitd, [mn.n.p2p_node.serverfactory.listen_port.getHost().port for mn in nodes], [])))
        yield deferral.sleep(3)
        # mine SHARES shares, spreading getwork traffic across random nodes
        for i in xrange(SHARES):
            proxy = jsonrpc.HTTPProxy('http://127.0.0.1:' + str(random.choice(nodes).web_port.getHost().port))
            blah = yield proxy.rpc_getwork()
            yield proxy.rpc_getwork(blah['data'])
            yield deferral.sleep(.05)
        print type(nodes[0].n.tracker.items[nodes[0].n.best_share_var.value])
        # smoke-test every page of the web interface served by node 0
        from p2pool import web
        stop_event = variable.Event()
        web2_root = web.get_web_root(nodes[0].wb, tempfile.mkdtemp(), variable.Variable(None), stop_event)
        web2_port = reactor.listenTCP(0, server.Site(web2_root))
        for name in web2_root.listNames() + ['web/' + x for x in web2_root.getChildWithDefault('web', None).listNames()]:
            if name in ['web/graph_data', 'web/share', 'web/share_data']: continue
                res = yield client.getPage('http://127.0.0.1:%i/%s' % (web2_port.getHost().port, name))
                traceback.print_exc()
                print repr(res)[:100]
        yield web2_port.stopListening()
        stop_event.happened()
        yield deferral.sleep(3)
        # every node must have converged on the same verified chain of SHARES shares
        for i, n in enumerate(nodes):
            assert len(n.n.tracker.items) == SHARES, (i, len(n.n.tracker.items))
            assert n.n.tracker.verified.get_height(n.n.best_share_var.value) == SHARES, (i, n.n.tracker.verified.get_height(n.n.best_share_var.value))
            # newest share uses the successor type (if any); a 5-back ancestor is the base Share type
            assert type(n.n.tracker.items[nodes[0].n.best_share_var.value]) is (data.Share.SUCCESSOR if data.Share.SUCCESSOR is not None else data.Share)
            assert type(n.n.tracker.items[n.n.tracker.get_nth_parent_hash(nodes[0].n.best_share_var.value, SHARES - 5)]) is data.Share
        yield deferral.sleep(20) # waiting for work_poller to exit
    test_nodes.timeout = 300