this bitcoin over both its p2p interface and its RPC interface. For standard
configurations, using p2p should be as simple as:
- python main.py <rpc_username> <rpc_password>
+ python run_p2pool.py <rpc_username> <rpc_password>
Then run your miner program, connecting to 127.0.0.1 on port 9332 with any
username and password.
Use
- python main.py --help
+ python run_p2pool.py --help
for additional options.
+++ /dev/null
-'''
-Implementation of Bitcoin's p2p protocol
-'''
-
-from __future__ import division
-
-import hashlib
-import random
-import StringIO
-import socket
-import struct
-import time
-import traceback
-
-from twisted.internet import defer, protocol, reactor
-
-import expiring_dict
-import util
-
-class EarlyEnd(Exception):
- pass
-
-class LateEnd(Exception):
- pass
-
-class Type(object):
- # the same data can have only one unpacked representation, but multiple packed binary representations
-
- def _unpack(self, data):
- f = StringIO.StringIO(data)
-
- obj = self.read(f)
-
- if f.tell() != len(data):
- raise LateEnd('underread ' + repr((self, data)))
-
- return obj
-
- def unpack(self, data):
- obj = self._unpack(data)
- assert self._unpack(self._pack(obj)) == obj
- return obj
-
- def _pack(self, obj):
- f = StringIO.StringIO()
-
- self.write(f, obj)
-
- data = f.getvalue()
-
- return data
-
- def pack(self, obj):
- data = self._pack(obj)
- assert self._unpack(data) == obj
- return data
-
-class VarIntType(Type):
- def read(self, file):
- data = file.read(1)
- if len(data) != 1:
- raise EarlyEnd()
- first, = struct.unpack('<B', data)
- if first == 0xff: desc = '<Q'
- elif first == 0xfe: desc = '<I'
- elif first == 0xfd: desc = '<H'
- else: return first
- length = struct.calcsize(desc)
- data = file.read(length)
- if len(data) != length:
- raise EarlyEnd()
- return struct.unpack(desc, data)[0]
-
- def write(self, file, item):
- if item < 0xfd:
- file.write(struct.pack('<B', item))
- elif item <= 0xffff:
- file.write(struct.pack('<BH', 0xfd, item))
- elif item <= 0xffffffff:
- file.write(struct.pack('<BI', 0xfe, item))
- elif item <= 0xffffffffffffffff:
- file.write(struct.pack('<BQ', 0xff, item))
- else:
- raise ValueError('int too large for varint')
-
-class VarStrType(Type):
- def read(self, file):
- length = VarIntType().read(file)
- res = file.read(length)
- if len(res) != length:
- raise EarlyEnd('var str not long enough %r' % ((length, len(res), res),))
- return res
-
- def write(self, file, item):
- VarIntType().write(file, len(item))
- file.write(item)
-
-class FixedStrType(Type):
- def __init__(self, length):
- self.length = length
-
- def read(self, file):
- res = file.read(self.length)
- if len(res) != self.length:
- raise EarlyEnd('early EOF!')
- return res
-
- def write(self, file, item):
- if len(item) != self.length:
- raise ValueError('incorrect length!')
- file.write(item)
-
-class EnumType(Type):
- def __init__(self, inner, values):
- self.inner = inner
- self.values = values
-
- self.keys = {}
- for k, v in values.iteritems():
- if v in self.keys:
- raise ValueError('duplicate value in values')
- self.keys[v] = k
-
- def read(self, file):
- return self.keys[self.inner.read(file)]
-
- def write(self, file, item):
- self.inner.write(file, self.values[item])
-
-class HashType(Type):
- def read(self, file):
- data = file.read(256//8)
- if len(data) != 256//8:
- raise EarlyEnd('incorrect length!')
- return int(data[::-1].encode('hex'), 16)
-
- def write(self, file, item):
- file.write(('%064x' % (item,)).decode('hex')[::-1])
-
-class ShortHashType(Type):
- def read(self, file):
- data = file.read(160//8)
- if len(data) != 160//8:
- raise EarlyEnd('incorrect length!')
- return int(data[::-1].encode('hex'), 16)
-
- def write(self, file, item):
- file.write(('%020x' % (item,)).decode('hex')[::-1])
-
-class ListType(Type):
- def __init__(self, type):
- self.type = type
-
- def read(self, file):
- length = VarIntType().read(file)
- return [self.type.read(file) for i in xrange(length)]
-
- def write(self, file, item):
- VarIntType().write(file, len(item))
- for subitem in item:
- self.type.write(file, subitem)
-
-class StructType(Type):
- def __init__(self, desc):
- self.desc = desc
- self.length = struct.calcsize(self.desc)
-
- def read(self, file):
- data = file.read(self.length)
- if len(data) != self.length:
- raise EarlyEnd()
- res, = struct.unpack(self.desc, data)
- return res
-
- def write(self, file, item):
- data = struct.pack(self.desc, item)
- if struct.unpack(self.desc, data)[0] != item:
- # special test because struct doesn't error on some overflows
- raise ValueError("item didn't survive pack cycle (%r)" % (item,))
- file.write(data)
-
-class IPV6AddressType(Type):
- def read(self, file):
- data = file.read(16)
- if len(data) != 16:
- raise EarlyEnd()
- if data[:12] != '00000000000000000000ffff'.decode('hex'):
- raise ValueError("ipv6 addresses not supported yet")
- return '::ffff:' + '.'.join(str(ord(x)) for x in data[12:])
-
- def write(self, file, item):
- prefix = '::ffff:'
- if not item.startswith(prefix):
- raise ValueError("ipv6 addresses not supported yet")
- item = item[len(prefix):]
- bits = map(int, item.split('.'))
- if len(bits) != 4:
- raise ValueError("invalid address: %r" % (bits,))
- data = '00000000000000000000ffff'.decode('hex') + ''.join(chr(x) for x in bits)
- assert len(data) == 16, len(data)
- file.write(data)
-
-class ComposedType(Type):
- def __init__(self, fields):
- self.fields = fields
-
- def read(self, file):
- item = {}
- for key, type_ in self.fields:
- item[key] = type_.read(file)
- return item
-
- def write(self, file, item):
- for key, type_ in self.fields:
- type_.write(file, item[key])
-
-address_type = ComposedType([
- ('services', StructType('<Q')),
- ('address', IPV6AddressType()),
- ('port', StructType('>H')),
-])
-
-tx_type = ComposedType([
- ('version', StructType('<I')),
- ('tx_ins', ListType(ComposedType([
- ('previous_output', ComposedType([
- ('hash', HashType()),
- ('index', StructType('<I')),
- ])),
- ('script', VarStrType()),
- ('sequence', StructType('<I')),
- ]))),
- ('tx_outs', ListType(ComposedType([
- ('value', StructType('<Q')),
- ('script', VarStrType()),
- ]))),
- ('lock_time', StructType('<I')),
-])
-
-block_header_type = ComposedType([
- ('version', StructType('<I')),
- ('previous_block', HashType()),
- ('merkle_root', HashType()),
- ('timestamp', StructType('<I')),
- ('bits', StructType('<I')),
- ('nonce', StructType('<I')),
-])
-
-block_type = ComposedType([
- ('header', block_header_type),
- ('txs', ListType(tx_type)),
-])
-
-def doublesha(data):
- return HashType().unpack(hashlib.sha256(hashlib.sha256(data).digest()).digest())
-
-def ripemdsha(data):
- return ShortHashType().unpack(hashlib.new('ripemd160', hashlib.sha256(data).digest()).digest())
-
-merkle_record_type = ComposedType([
- ('left', HashType()),
- ('right', HashType()),
-])
-
-def merkle_hash(tx_list):
- hash_list = [doublesha(tx_type.pack(tx)) for tx in tx_list]
- while len(hash_list) > 1:
- hash_list = [doublesha(merkle_record_type.pack(dict(left=left, right=left if right is None else right)))
- for left, right in zip(hash_list[::2], hash_list[1::2] + [None])]
- return hash_list[0]
-
-def tx_hash(tx):
- return doublesha(tx_type.pack(tx))
-
-def block_hash(header):
- return doublesha(block_header.pack(header))
-
-class BaseProtocol(protocol.Protocol):
- def connectionMade(self):
- self.dataReceived = util.DataChunker(self.dataReceiver())
-
- def dataReceiver(self):
- while True:
- start = ''
- while start != self._prefix:
- start = (start + (yield 1))[-len(self._prefix):]
-
- command = (yield 12).rstrip('\0')
- length, = struct.unpack('<I', (yield 4))
-
- if self.use_checksum:
- checksum = yield 4
- else:
- checksum = None
-
- payload = yield length
-
- if checksum is not None:
- if hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4] != checksum:
- print 'RECV', command, checksum.encode('hex') if checksum is not None else None, repr(payload.encode('hex')), len(payload)
- print 'INVALID HASH'
- continue
-
- type_ = self.message_types.get(command, None)
- if type_ is None:
- print 'RECV', command, checksum.encode('hex') if checksum is not None else None, repr(payload.encode('hex')), len(payload)
- print 'NO TYPE FOR', repr(command)
- continue
-
- try:
- payload2 = type_.unpack(payload)
- except:
- print 'RECV', command, checksum.encode('hex') if checksum is not None else None, repr(payload.encode('hex')), len(payload)
- traceback.print_exc()
- continue
-
- handler = getattr(self, 'handle_' + command, None)
- if handler is None:
- print 'RECV', command, checksum.encode('hex') if checksum is not None else None, repr(payload.encode('hex')), len(payload)
- print 'NO HANDLER FOR', command
- continue
-
- #print 'RECV', command, payload2
-
- try:
- handler(**payload2)
- except:
- print 'RECV', command, checksum.encode('hex') if checksum is not None else None, repr(payload.encode('hex')), len(payload)
- traceback.print_exc()
- continue
-
- def sendPacket(self, command, payload2={}):
- payload = self.message_types[command].pack(payload2)
- if len(command) >= 12:
- raise ValueError('command too long')
- if self.use_checksum:
- checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
- else:
- checksum = ''
- data = self._prefix + struct.pack('<12sI', command, len(payload)) + checksum + payload
- self.transport.write(data)
- #print 'SEND', command, payload2
-
- def __getattr__(self, attr):
- prefix = 'send_'
- if attr.startswith(prefix):
- command = attr[len(prefix):]
- return lambda **payload2: self.sendPacket(command, payload2)
- #return protocol.Protocol.__getattr__(self, attr)
- raise AttributeError(attr)
-
-class Protocol(BaseProtocol):
- def __init__(self, testnet=False):
- if testnet:
- self._prefix = 'fabfb5da'.decode('hex')
- else:
- self._prefix = 'f9beb4d9'.decode('hex')
-
- version = 0
-
- @property
- def use_checksum(self):
- return self.version >= 209
-
- message_types = {
- 'version': ComposedType([
- ('version', StructType('<I')),
- ('services', StructType('<Q')),
- ('time', StructType('<Q')),
- ('addr_to', address_type),
- ('addr_from', address_type),
- ('nonce', StructType('<Q')),
- ('sub_version_num', VarStrType()),
- ('start_height', StructType('<I')),
- ]),
- 'verack': ComposedType([]),
- 'addr': ComposedType([
- ('addrs', ListType(ComposedType([
- ('timestamp', StructType('<I')),
- ('address', address_type),
- ]))),
- ]),
- 'inv': ComposedType([
- ('invs', ListType(ComposedType([
- ('type', EnumType(StructType('<I'), {'tx': 1, 'block': 2})),
- ('hash', HashType()),
- ]))),
- ]),
- 'getdata': ComposedType([
- ('requests', ListType(ComposedType([
- ('type', EnumType(StructType('<I'), {'tx': 1, 'block': 2})),
- ('hash', HashType()),
- ]))),
- ]),
- 'getblocks': ComposedType([
- ('version', StructType('<I')),
- ('have', ListType(HashType())),
- ('last', HashType()),
- ]),
- 'getheaders': ComposedType([
- ('version', StructType('<I')),
- ('have', ListType(HashType())),
- ('last', HashType()),
- ]),
- 'tx': ComposedType([
- ('tx', tx_type),
- ]),
- 'block': ComposedType([
- ('block', block_type),
- ]),
- 'headers': ComposedType([
- ('headers', ListType(block_header_type)),
- ]),
- 'getaddr': ComposedType([]),
- 'checkorder': ComposedType([
- ('id', HashType()),
- ('order', FixedStrType(60)), # XXX
- ]),
- 'submitorder': ComposedType([
- ('id', HashType()),
- ('order', FixedStrType(60)), # XXX
- ]),
- 'reply': ComposedType([
- ('hash', HashType()),
- ('reply', EnumType(StructType('<I'), {'success': 0, 'failure': 1, 'denied': 2})),
- ('script', VarStrType()),
- ]),
- 'ping': ComposedType([]),
- 'alert': ComposedType([
- ('message', VarStrType()),
- ('signature', VarStrType()),
- ]),
- }
-
- null_order = '\0'*60
-
- def connectionMade(self):
- BaseProtocol.connectionMade(self)
-
- self.send_version(
- version=32200,
- services=1,
- time=int(time.time()),
- addr_to=dict(
- services=1,
- address='::ffff:' + self.transport.getPeer().host,
- port=self.transport.getPeer().port,
- ),
- addr_from=dict(
- services=1,
- address='::ffff:' + self.transport.getHost().host,
- port=self.transport.getHost().port,
- ),
- nonce=random.randrange(2**64),
- sub_version_num='',
- start_height=0,
- )
-
- def handle_version(self, version, services, time, addr_to, addr_from, nonce, sub_version_num, start_height):
- #print 'VERSION', locals()
- self.version_after = version
- self.send_verack()
-
- def handle_verack(self):
- self.version = self.version_after
-
- # connection ready
- self.check_order = util.GenericDeferrer(2**256, lambda id, order: self.send_checkorder(id=id, order=order))
- self.submit_order = util.GenericDeferrer(2**256, lambda id, order: self.send_submitorder(id=id, order=order))
- self.get_block = util.ReplyMatcher(lambda hash: self.send_getdata(requests=[dict(type='block', hash=hash)]))
- self.get_block_header = util.ReplyMatcher(lambda hash: self.send_getdata(requests=[dict(type='block', hash=hash)]))
-
- if hasattr(self.factory, 'resetDelay'):
- self.factory.resetDelay()
- if hasattr(self.factory, 'gotConnection'):
- self.factory.gotConnection(self)
-
- def handle_inv(self, invs):
- for inv in invs:
- #print 'INV', item['type'], hex(item['hash'])
- self.send_getdata(requests=[inv])
-
- def handle_addr(self, addrs):
- for addr in addrs:
- pass#print 'ADDR', addr
-
- def handle_reply(self, hash, reply, script):
- self.check_order.got_response(hash, dict(reply=reply, script=script))
- self.submit_order.got_response(hash, dict(reply=reply, script=script))
-
- def handle_tx(self, tx):
- #print 'TX', hex(merkle_hash([tx])), tx
- self.factory.new_tx.happened(tx)
-
- def handle_block(self, block):
- self.get_block.got_response(block_hash(block['header']), block)
- self.factory.new_block.happened(block)
-
- def handle_ping(self):
- pass
-
- def connectionLost(self, reason):
- if hasattr(self.factory, 'gotConnection'):
- self.factory.gotConnection(None)
-
-class ClientFactory(protocol.ReconnectingClientFactory):
- protocol = Protocol
-
- maxDelay = 15
-
- def __init__(self, testnet=False):
- self.testnet = testnet
- self.conn = util.Variable(None)
-
- self.new_block = util.Event()
- self.new_tx = util.Event()
-
- def buildProtocol(self, addr):
- p = self.protocol(self.testnet)
- p.factory = self
- return p
-
- def gotConnection(self, conn):
- self.conn.set(conn)
- self.conn = conn
-
- def getProtocol(self):
- return self.conn.get_not_none()
-
-if __name__ == '__main__':
- factory = ClientFactory()
- reactor.connectTCP('127.0.0.1', 8333, factory)
-
- reactor.run()
from __future__ import division
import os
-import sys
import sqlite3
import db
--- /dev/null
+import struct
+import StringIO
+import hashlib
+
+class EarlyEnd(Exception):
+ pass
+
+class LateEnd(Exception):
+ pass
+
+class Type(object):
+ # the same data can have only one unpacked representation, but multiple packed binary representations
+
+ def _unpack(self, data):
+ f = StringIO.StringIO(data)
+
+ obj = self.read(f)
+
+ if f.tell() != len(data):
+ raise LateEnd('underread ' + repr((self, data)))
+
+ return obj
+
+ def unpack(self, data):
+ obj = self._unpack(data)
+ assert self._unpack(self._pack(obj)) == obj
+ return obj
+
+ def _pack(self, obj):
+ f = StringIO.StringIO()
+
+ self.write(f, obj)
+
+ data = f.getvalue()
+
+ return data
+
+ def pack(self, obj):
+ data = self._pack(obj)
+ assert self._unpack(data) == obj
+ return data
+
+class VarIntType(Type):
+ def read(self, file):
+ data = file.read(1)
+ if len(data) != 1:
+ raise EarlyEnd()
+ first, = struct.unpack('<B', data)
+ if first == 0xff: desc = '<Q'
+ elif first == 0xfe: desc = '<I'
+ elif first == 0xfd: desc = '<H'
+ else: return first
+ length = struct.calcsize(desc)
+ data = file.read(length)
+ if len(data) != length:
+ raise EarlyEnd()
+ return struct.unpack(desc, data)[0]
+
+ def write(self, file, item):
+ if item < 0xfd:
+ file.write(struct.pack('<B', item))
+ elif item <= 0xffff:
+ file.write(struct.pack('<BH', 0xfd, item))
+ elif item <= 0xffffffff:
+ file.write(struct.pack('<BI', 0xfe, item))
+ elif item <= 0xffffffffffffffff:
+ file.write(struct.pack('<BQ', 0xff, item))
+ else:
+ raise ValueError('int too large for varint')
+
+class VarStrType(Type):
+ def read(self, file):
+ length = VarIntType().read(file)
+ res = file.read(length)
+ if len(res) != length:
+ raise EarlyEnd('var str not long enough %r' % ((length, len(res), res),))
+ return res
+
+ def write(self, file, item):
+ VarIntType().write(file, len(item))
+ file.write(item)
+
+class FixedStrType(Type):
+ def __init__(self, length):
+ self.length = length
+
+ def read(self, file):
+ res = file.read(self.length)
+ if len(res) != self.length:
+ raise EarlyEnd('early EOF!')
+ return res
+
+ def write(self, file, item):
+ if len(item) != self.length:
+ raise ValueError('incorrect length!')
+ file.write(item)
+
+class EnumType(Type):
+ def __init__(self, inner, values):
+ self.inner = inner
+ self.values = values
+
+ self.keys = {}
+ for k, v in values.iteritems():
+ if v in self.keys:
+ raise ValueError('duplicate value in values')
+ self.keys[v] = k
+
+ def read(self, file):
+ return self.keys[self.inner.read(file)]
+
+ def write(self, file, item):
+ self.inner.write(file, self.values[item])
+
+class HashType(Type):
+ def read(self, file):
+ data = file.read(256//8)
+ if len(data) != 256//8:
+ raise EarlyEnd('incorrect length!')
+ return int(data[::-1].encode('hex'), 16)
+
+ def write(self, file, item):
+ file.write(('%064x' % (item,)).decode('hex')[::-1])
+
+class ShortHashType(Type):
+ def read(self, file):
+ data = file.read(160//8)
+ if len(data) != 160//8:
+ raise EarlyEnd('incorrect length!')
+ return int(data[::-1].encode('hex'), 16)
+
+ def write(self, file, item):
+ file.write(('%020x' % (item,)).decode('hex')[::-1])
+
+class ListType(Type):
+ def __init__(self, type):
+ self.type = type
+
+ def read(self, file):
+ length = VarIntType().read(file)
+ return [self.type.read(file) for i in xrange(length)]
+
+ def write(self, file, item):
+ VarIntType().write(file, len(item))
+ for subitem in item:
+ self.type.write(file, subitem)
+
+class StructType(Type):
+ def __init__(self, desc):
+ self.desc = desc
+ self.length = struct.calcsize(self.desc)
+
+ def read(self, file):
+ data = file.read(self.length)
+ if len(data) != self.length:
+ raise EarlyEnd()
+ res, = struct.unpack(self.desc, data)
+ return res
+
+ def write(self, file, item):
+ data = struct.pack(self.desc, item)
+ if struct.unpack(self.desc, data)[0] != item:
+ # special test because struct doesn't error on some overflows
+ raise ValueError("item didn't survive pack cycle (%r)" % (item,))
+ file.write(data)
+
+class IPV6AddressType(Type):
+ def read(self, file):
+ data = file.read(16)
+ if len(data) != 16:
+ raise EarlyEnd()
+ if data[:12] != '00000000000000000000ffff'.decode('hex'):
+ raise ValueError("ipv6 addresses not supported yet")
+ return '::ffff:' + '.'.join(str(ord(x)) for x in data[12:])
+
+ def write(self, file, item):
+ prefix = '::ffff:'
+ if not item.startswith(prefix):
+ raise ValueError("ipv6 addresses not supported yet")
+ item = item[len(prefix):]
+ bits = map(int, item.split('.'))
+ if len(bits) != 4:
+ raise ValueError("invalid address: %r" % (bits,))
+ data = '00000000000000000000ffff'.decode('hex') + ''.join(chr(x) for x in bits)
+ assert len(data) == 16, len(data)
+ file.write(data)
+
+class ComposedType(Type):
+ def __init__(self, fields):
+ self.fields = fields
+
+ def read(self, file):
+ item = {}
+ for key, type_ in self.fields:
+ item[key] = type_.read(file)
+ return item
+
+ def write(self, file, item):
+ for key, type_ in self.fields:
+ type_.write(file, item[key])
+
+address_type = ComposedType([
+ ('services', StructType('<Q')),
+ ('address', IPV6AddressType()),
+ ('port', StructType('>H')),
+])
+
+tx_type = ComposedType([
+ ('version', StructType('<I')),
+ ('tx_ins', ListType(ComposedType([
+ ('previous_output', ComposedType([
+ ('hash', HashType()),
+ ('index', StructType('<I')),
+ ])),
+ ('script', VarStrType()),
+ ('sequence', StructType('<I')),
+ ]))),
+ ('tx_outs', ListType(ComposedType([
+ ('value', StructType('<Q')),
+ ('script', VarStrType()),
+ ]))),
+ ('lock_time', StructType('<I')),
+])
+
+block_header_type = ComposedType([
+ ('version', StructType('<I')),
+ ('previous_block', HashType()),
+ ('merkle_root', HashType()),
+ ('timestamp', StructType('<I')),
+ ('bits', StructType('<I')),
+ ('nonce', StructType('<I')),
+])
+
+block_type = ComposedType([
+ ('header', block_header_type),
+ ('txs', ListType(tx_type)),
+])
+
+def doublesha(data):
+ return HashType().unpack(hashlib.sha256(hashlib.sha256(data).digest()).digest())
+
+def ripemdsha(data):
+ return ShortHashType().unpack(hashlib.new('ripemd160', hashlib.sha256(data).digest()).digest())
+
+merkle_record_type = ComposedType([
+ ('left', HashType()),
+ ('right', HashType()),
+])
+
+def merkle_hash(tx_list):
+ hash_list = [doublesha(tx_type.pack(tx)) for tx in tx_list]
+ while len(hash_list) > 1:
+ hash_list = [doublesha(merkle_record_type.pack(dict(left=left, right=left if right is None else right)))
+ for left, right in zip(hash_list[::2], hash_list[1::2] + [None])]
+ return hash_list[0]
+
+def tx_hash(tx):
+ return doublesha(tx_type.pack(tx))
+
+def block_hash(header):
+ return doublesha(block_header_type.pack(header))
+
+class EarlyEnd(Exception):
+ pass
+
+class LateEnd(Exception):
+ pass
+
+class Type(object):
+ # the same data can have only one unpacked representation, but multiple packed binary representations
+
+ def _unpack(self, data):
+ f = StringIO.StringIO(data)
+
+ obj = self.read(f)
+
+ if f.tell() != len(data):
+ raise LateEnd('underread ' + repr((self, data)))
+
+ return obj
+
+ def unpack(self, data):
+ obj = self._unpack(data)
+ assert self._unpack(self._pack(obj)) == obj
+ return obj
+
+ def _pack(self, obj):
+ f = StringIO.StringIO()
+
+ self.write(f, obj)
+
+ data = f.getvalue()
+
+ return data
+
+ def pack(self, obj):
+ data = self._pack(obj)
+ assert self._unpack(data) == obj
+ return data
+
+class VarIntType(Type):
+ def read(self, file):
+ data = file.read(1)
+ if len(data) != 1:
+ raise EarlyEnd()
+ first, = struct.unpack('<B', data)
+ if first == 0xff: desc = '<Q'
+ elif first == 0xfe: desc = '<I'
+ elif first == 0xfd: desc = '<H'
+ else: return first
+ length = struct.calcsize(desc)
+ data = file.read(length)
+ if len(data) != length:
+ raise EarlyEnd()
+ return struct.unpack(desc, data)[0]
+
+ def write(self, file, item):
+ if item < 0xfd:
+ file.write(struct.pack('<B', item))
+ elif item <= 0xffff:
+ file.write(struct.pack('<BH', 0xfd, item))
+ elif item <= 0xffffffff:
+ file.write(struct.pack('<BI', 0xfe, item))
+ elif item <= 0xffffffffffffffff:
+ file.write(struct.pack('<BQ', 0xff, item))
+ else:
+ raise ValueError('int too large for varint')
+
+class VarStrType(Type):
+ def read(self, file):
+ length = VarIntType().read(file)
+ res = file.read(length)
+ if len(res) != length:
+ raise EarlyEnd('var str not long enough %r' % ((length, len(res), res),))
+ return res
+
+ def write(self, file, item):
+ VarIntType().write(file, len(item))
+ file.write(item)
+
+class FixedStrType(Type):
+ def __init__(self, length):
+ self.length = length
+
+ def read(self, file):
+ res = file.read(self.length)
+ if len(res) != self.length:
+ raise EarlyEnd('early EOF!')
+ return res
+
+ def write(self, file, item):
+ if len(item) != self.length:
+ raise ValueError('incorrect length!')
+ file.write(item)
+
+class EnumType(Type):
+ def __init__(self, inner, values):
+ self.inner = inner
+ self.values = values
+
+ self.keys = {}
+ for k, v in values.iteritems():
+ if v in self.keys:
+ raise ValueError('duplicate value in values')
+ self.keys[v] = k
+
+ def read(self, file):
+ return self.keys[self.inner.read(file)]
+
+ def write(self, file, item):
+ self.inner.write(file, self.values[item])
+
+class HashType(Type):
+ def read(self, file):
+ data = file.read(256//8)
+ if len(data) != 256//8:
+ raise EarlyEnd('incorrect length!')
+ return int(data[::-1].encode('hex'), 16)
+
+ def write(self, file, item):
+ file.write(('%064x' % (item,)).decode('hex')[::-1])
+
+class ShortHashType(Type):
+ def read(self, file):
+ data = file.read(160//8)
+ if len(data) != 160//8:
+ raise EarlyEnd('incorrect length!')
+ return int(data[::-1].encode('hex'), 16)
+
+ def write(self, file, item):
+ file.write(('%020x' % (item,)).decode('hex')[::-1])
+
+class ListType(Type):
+ def __init__(self, type):
+ self.type = type
+
+ def read(self, file):
+ length = VarIntType().read(file)
+ return [self.type.read(file) for i in xrange(length)]
+
+ def write(self, file, item):
+ VarIntType().write(file, len(item))
+ for subitem in item:
+ self.type.write(file, subitem)
+
+class StructType(Type):
+ def __init__(self, desc):
+ self.desc = desc
+ self.length = struct.calcsize(self.desc)
+
+ def read(self, file):
+ data = file.read(self.length)
+ if len(data) != self.length:
+ raise EarlyEnd()
+ res, = struct.unpack(self.desc, data)
+ return res
+
+ def write(self, file, item):
+ data = struct.pack(self.desc, item)
+ if struct.unpack(self.desc, data)[0] != item:
+ # special test because struct doesn't error on some overflows
+ raise ValueError("item didn't survive pack cycle (%r)" % (item,))
+ file.write(data)
+
+class IPV6AddressType(Type):
+ def read(self, file):
+ data = file.read(16)
+ if len(data) != 16:
+ raise EarlyEnd()
+ if data[:12] != '00000000000000000000ffff'.decode('hex'):
+ raise ValueError("ipv6 addresses not supported yet")
+ return '::ffff:' + '.'.join(str(ord(x)) for x in data[12:])
+
+ def write(self, file, item):
+ prefix = '::ffff:'
+ if not item.startswith(prefix):
+ raise ValueError("ipv6 addresses not supported yet")
+ item = item[len(prefix):]
+ bits = map(int, item.split('.'))
+ if len(bits) != 4:
+ raise ValueError("invalid address: %r" % (bits,))
+ data = '00000000000000000000ffff'.decode('hex') + ''.join(chr(x) for x in bits)
+ assert len(data) == 16, len(data)
+ file.write(data)
+
+class ComposedType(Type):
+ def __init__(self, fields):
+ self.fields = fields
+
+ def read(self, file):
+ item = {}
+ for key, type_ in self.fields:
+ item[key] = type_.read(file)
+ return item
+
+ def write(self, file, item):
+ for key, type_ in self.fields:
+ type_.write(file, item[key])
+
+address_type = ComposedType([
+ ('services', StructType('<Q')),
+ ('address', IPV6AddressType()),
+ ('port', StructType('>H')),
+])
+
+tx_type = ComposedType([
+ ('version', StructType('<I')),
+ ('tx_ins', ListType(ComposedType([
+ ('previous_output', ComposedType([
+ ('hash', HashType()),
+ ('index', StructType('<I')),
+ ])),
+ ('script', VarStrType()),
+ ('sequence', StructType('<I')),
+ ]))),
+ ('tx_outs', ListType(ComposedType([
+ ('value', StructType('<Q')),
+ ('script', VarStrType()),
+ ]))),
+ ('lock_time', StructType('<I')),
+])
+
+block_header_type = ComposedType([
+ ('version', StructType('<I')),
+ ('previous_block', HashType()),
+ ('merkle_root', HashType()),
+ ('timestamp', StructType('<I')),
+ ('bits', StructType('<I')),
+ ('nonce', StructType('<I')),
+])
+
+block_type = ComposedType([
+ ('header', block_header_type),
+ ('txs', ListType(tx_type)),
+])
+
+def doublesha(data):
+ return HashType().unpack(hashlib.sha256(hashlib.sha256(data).digest()).digest())
+
+def ripemdsha(data):
+ return ShortHashType().unpack(hashlib.new('ripemd160', hashlib.sha256(data).digest()).digest())
+
+merkle_record_type = ComposedType([
+ ('left', HashType()),
+ ('right', HashType()),
+])
+
+def merkle_hash(tx_list):
+ hash_list = [doublesha(tx_type.pack(tx)) for tx in tx_list]
+ while len(hash_list) > 1:
+ hash_list = [doublesha(merkle_record_type.pack(dict(left=left, right=left if right is None else right)))
+ for left, right in zip(hash_list[::2], hash_list[1::2] + [None])]
+ return hash_list[0]
+
+def tx_hash(tx):
+ return doublesha(tx_type.pack(tx))
+
+def block_hash(header):
+ return doublesha(block_header_type.pack(header))
+
+def bits_to_target(bits):
+ return (bits & 0x00ffffff) * 2 ** (8 * ((bits >> 24) - 3))
+
+def target_to_average_attempts(target):
+ return 2**256//(target + 1)
import struct
-import sha256
+from . import data as bitcoin_data
+from . import sha256
-def bits_to_target(bits):
- return (bits & 0x00ffffff) * 2 ** (8 * ((bits >> 24) - 3))
-
-def target_to_attempts(target):
- return (2**257 + target + 1)//(2*target + 2)
-
-def reverse_chunks(s, l):
+def _reverse_chunks(s, l):
return ''.join(reversed([s[x:x+l] for x in xrange(0, len(s), l)]))
class BlockAttempt(object):
def __init__(self, version, previous_block, merkle_root, timestamp, bits):
self.version, self.previous_block, self.merkle_root, self.timestamp, self.bits = version, previous_block, merkle_root, timestamp, bits
+ def __hash__(self):
+ return hash((self.version, self.previous_block, self.merkle_root, self.timestamp, self.bits))
+
def __repr__(self):
return '<BlockAttempt %s>' % (' '.join('%s=%s' % (k, hex(v))) for k, v in self.__dict__.iteritems())
return 'BlockAttempt(%s)' % (', '.join('%s=%r' % (k, v) for k, v in self.__dict__.iteritems()),)
def getwork(self, target_multiplier=1, _check=2):
- target = bits_to_target(self.bits) * target_multiplier
+ target = bitcoin_data.bits_to_target(self.bits) * target_multiplier
if target >= 2**256//2**32:
raise ValueError("target higher than standard maximum")
- previous_block2 = reverse_chunks('%064x' % self.previous_block, 8).decode('hex')
- merkle_root2 = reverse_chunks('%064x' % self.merkle_root, 8).decode('hex')
+ previous_block2 = _reverse_chunks('%064x' % self.previous_block, 8).decode('hex')
+ merkle_root2 = _reverse_chunks('%064x' % self.merkle_root, 8).decode('hex')
data = struct.pack('>I32s32sIII', self.version, previous_block2, merkle_root2, self.timestamp, self.bits, 0).encode('hex') + '000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000'
previous_block3 = ('%064x' % self.previous_block).decode('hex')[::-1]
'data': data,
'hash1': '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000',
'target': ('%064x' % (target,)).decode('hex')[::-1].encode('hex'),
- 'midstate': reverse_chunks(sha256.process(data2[:64])[::-1], 4).encode('hex'),
+ 'midstate': _reverse_chunks(sha256.process(data2[:64])[::-1], 4).encode('hex'),
}
if _check:
def decode_data(data):
version, previous_block, merkle_root, timestamp, bits, nonce = struct.unpack('>I32s32sIII', data[:160].decode('hex'))
- previous_block = int(reverse_chunks(previous_block.encode('hex'), 8), 16)
- merkle_root = int(reverse_chunks(merkle_root.encode('hex'), 8), 16)
+ previous_block = int(_reverse_chunks(previous_block.encode('hex'), 8), 16)
+ merkle_root = int(_reverse_chunks(merkle_root.encode('hex'), 8), 16)
return dict(version=version, previous_block=previous_block, merkle_root=merkle_root, timestamp=timestamp, bits=bits, nonce=nonce)
if __name__ == '__main__':
--- /dev/null
+'''
+Implementation of Bitcoin's p2p protocol
+'''
+
+from __future__ import division
+
+import hashlib
+import random
+import struct
+import time
+import traceback
+
+from twisted.internet import protocol, reactor
+
+from . import data as bitcoin_data
+import p2pool.util
+
+class BaseProtocol(protocol.Protocol):
+ # Shared framing layer for bitcoin-style p2p links. Wire format per packet:
+ # <4-byte magic prefix><12-byte NUL-padded command><4-byte LE length>
+ # [<4-byte double-SHA256 checksum, if self.use_checksum>]<payload>.
+ # Subclasses provide self._prefix, self.use_checksum, and per-command
+ # message_<cmd> (codec) / handle_<cmd> (callback) attributes.
+ def connectionMade(self):
+ # DataChunker adapts the generator below into Twisted's dataReceived:
+ # each `yield n` inside the generator receives exactly n bytes.
+ self.dataReceived = p2pool.util.DataChunker(self.dataReceiver())
+
+ def dataReceiver(self):
+ # Generator-based packet parser; loops forever, one iteration per packet.
+ while True:
+ # Scan byte-by-byte until the magic prefix is seen, which
+ # resynchronizes the stream after any leading garbage.
+ start = ''
+ while start != self._prefix:
+ start = (start + (yield 1))[-len(self._prefix):]
+
+ command = (yield 12).rstrip('\0')
+ length, = struct.unpack('<I', (yield 4))
+
+ if self.use_checksum:
+ checksum = yield 4
+ else:
+ checksum = None
+
+ payload = yield length
+
+ # Bad checksum: log and drop the packet, keep the connection.
+ if checksum is not None:
+ if hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4] != checksum:
+ print 'RECV', command, checksum.encode('hex') if checksum is not None else None, repr(payload.encode('hex')), len(payload)
+ print 'INVALID HASH'
+ continue
+
+ # Unknown command: log and ignore.
+ type_ = getattr(self, "message_" + command, None)
+ if type_ is None:
+ print 'RECV', command, checksum.encode('hex') if checksum is not None else None, repr(payload.encode('hex')), len(payload)
+ print 'NO TYPE FOR', repr(command)
+ continue
+
+ # Bare except is deliberate here: a malformed payload from a peer
+ # must never kill the parser loop.
+ try:
+ payload2 = type_.unpack(payload)
+ except:
+ print 'RECV', command, checksum.encode('hex') if checksum is not None else None, repr(payload.encode('hex')), len(payload)
+ traceback.print_exc()
+ continue
+
+ handler = getattr(self, 'handle_' + command, None)
+ if handler is None:
+ print 'RECV', command, checksum.encode('hex') if checksum is not None else None, repr(payload.encode('hex')), len(payload)
+ print 'NO HANDLER FOR', command
+ continue
+
+ #print 'RECV', command, payload2
+
+ # Handler exceptions are logged but never disconnect the peer.
+ try:
+ handler(**payload2)
+ except:
+ print 'RECV', command, checksum.encode('hex') if checksum is not None else None, repr(payload.encode('hex')), len(payload)
+ traceback.print_exc()
+ continue
+
+ # NOTE(review): payload2={} is a mutable default argument; harmless here
+ # because it is only read (packed), never mutated.
+ def sendPacket(self, command, payload2={}):
+ # Pack one message and write it to the transport.
+ type_ = getattr(self, "message_" + command, None)
+ if type_ is None:
+ raise ValueError('invalid command')
+ payload = type_.pack(payload2)
+ # NOTE(review): '>=' also rejects exactly-12-byte commands, which
+ # '<12s' below could represent -- confirm intended.
+ if len(command) >= 12:
+ raise ValueError('command too long')
+ if self.use_checksum:
+ checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
+ else:
+ checksum = ''
+ data = self._prefix + struct.pack('<12sI', command, len(payload)) + checksum + payload
+ self.transport.write(data)
+ #print 'SEND', command, payload2
+
+ def __getattr__(self, attr):
+ # Sugar: self.send_<cmd>(**fields) forwards to sendPacket(<cmd>, fields).
+ prefix = 'send_'
+ if attr.startswith(prefix):
+ command = attr[len(prefix):]
+ return lambda **payload2: self.sendPacket(command, payload2)
+ #return protocol.Protocol.__getattr__(self, attr)
+ raise AttributeError(attr)
+
+class Protocol(BaseProtocol):
+ # Speaks the bitcoin network protocol to a single bitcoind peer:
+ # performs the version/verack handshake, then exposes request helpers
+ # (check_order, submit_order, get_block) and relays new txs/blocks to
+ # the factory's event streams.
+ def __init__(self, testnet=False):
+ # Network magic bytes select mainnet vs testnet framing.
+ if testnet:
+ self._prefix = 'fabfb5da'.decode('hex')
+ else:
+ self._prefix = 'f9beb4d9'.decode('hex')
+
+ # Peer protocol version; stays 0 until the handshake completes.
+ version = 0
+
+ @property
+ def use_checksum(self):
+ # Message checksums were introduced at protocol version 209.
+ return self.version >= 209
+
+ # Message codecs, one class attribute per supported p2p command;
+ # BaseProtocol looks these up by name as 'message_' + command.
+ message_version = bitcoin_data.ComposedType([
+ ('version', bitcoin_data.StructType('<I')),
+ ('services', bitcoin_data.StructType('<Q')),
+ ('time', bitcoin_data.StructType('<Q')),
+ ('addr_to', bitcoin_data.address_type),
+ ('addr_from', bitcoin_data.address_type),
+ ('nonce', bitcoin_data.StructType('<Q')),
+ ('sub_version_num', bitcoin_data.VarStrType()),
+ ('start_height', bitcoin_data.StructType('<I')),
+ ])
+ message_verack = bitcoin_data.ComposedType([])
+ message_addr = bitcoin_data.ComposedType([
+ ('addrs', bitcoin_data.ListType(bitcoin_data.ComposedType([
+ ('timestamp', bitcoin_data.StructType('<I')),
+ ('address', bitcoin_data.address_type),
+ ]))),
+ ])
+ message_inv = bitcoin_data.ComposedType([
+ ('invs', bitcoin_data.ListType(bitcoin_data.ComposedType([
+ ('type', bitcoin_data.EnumType(bitcoin_data.StructType('<I'), {'tx': 1, 'block': 2})),
+ ('hash', bitcoin_data.HashType()),
+ ]))),
+ ])
+ message_getdata = bitcoin_data.ComposedType([
+ ('requests', bitcoin_data.ListType(bitcoin_data.ComposedType([
+ ('type', bitcoin_data.EnumType(bitcoin_data.StructType('<I'), {'tx': 1, 'block': 2})),
+ ('hash', bitcoin_data.HashType()),
+ ]))),
+ ])
+ message_getblocks = bitcoin_data.ComposedType([
+ ('version', bitcoin_data.StructType('<I')),
+ ('have', bitcoin_data.ListType(bitcoin_data.HashType())),
+ ('last', bitcoin_data.HashType()),
+ ])
+ message_getheaders = bitcoin_data.ComposedType([
+ ('version', bitcoin_data.StructType('<I')),
+ ('have', bitcoin_data.ListType(bitcoin_data.HashType())),
+ ('last', bitcoin_data.HashType()),
+ ])
+ message_tx = bitcoin_data.ComposedType([
+ ('tx', bitcoin_data.tx_type),
+ ])
+ message_block = bitcoin_data.ComposedType([
+ ('block', bitcoin_data.block_type),
+ ])
+ message_headers = bitcoin_data.ComposedType([
+ ('headers', bitcoin_data.ListType(bitcoin_data.block_header_type)),
+ ])
+ message_getaddr = bitcoin_data.ComposedType([])
+ message_checkorder = bitcoin_data.ComposedType([
+ ('id', bitcoin_data.HashType()),
+ ('order', bitcoin_data.FixedStrType(60)), # XXX
+ ])
+ message_submitorder = bitcoin_data.ComposedType([
+ ('id', bitcoin_data.HashType()),
+ ('order', bitcoin_data.FixedStrType(60)), # XXX
+ ])
+ message_reply = bitcoin_data.ComposedType([
+ ('hash', bitcoin_data.HashType()),
+ ('reply', bitcoin_data.EnumType(bitcoin_data.StructType('<I'), {'success': 0, 'failure': 1, 'denied': 2})),
+ ('script', bitcoin_data.VarStrType()),
+ ])
+ message_ping = bitcoin_data.ComposedType([])
+ message_alert = bitcoin_data.ComposedType([
+ ('message', bitcoin_data.VarStrType()),
+ ('signature', bitcoin_data.VarStrType()),
+ ])
+
+ # All-zero order blob matching FixedStrType(60) above.
+ null_order = '\0'*60
+
+ def connectionMade(self):
+ BaseProtocol.connectionMade(self)
+
+ # Open the handshake by advertising ourselves as protocol 32200.
+ self.send_version(
+ version=32200,
+ services=1,
+ time=int(time.time()),
+ addr_to=dict(
+ services=1,
+ address='::ffff:' + self.transport.getPeer().host,
+ port=self.transport.getPeer().port,
+ ),
+ addr_from=dict(
+ services=1,
+ address='::ffff:' + self.transport.getHost().host,
+ port=self.transport.getHost().port,
+ ),
+ nonce=random.randrange(2**64),
+ sub_version_num='',
+ start_height=0,
+ )
+
+ # NOTE(review): the 'time' parameter shadows the time module inside
+ # this method (harmless here, it is unused in the body).
+ def handle_version(self, version, services, time, addr_to, addr_from, nonce, sub_version_num, start_height):
+ #print 'VERSION', locals()
+ # Stash the peer's version; only adopted once verack arrives, so
+ # use_checksum flips at the correct point in the stream.
+ self.version_after = version
+ self.send_verack()
+
+ def handle_verack(self):
+ self.version = self.version_after
+
+ # connection ready
+ self.check_order = p2pool.util.GenericDeferrer(2**256, lambda id, order: self.send_checkorder(id=id, order=order))
+ self.submit_order = p2pool.util.GenericDeferrer(2**256, lambda id, order: self.send_submitorder(id=id, order=order))
+ self.get_block = p2pool.util.ReplyMatcher(lambda hash: self.send_getdata(requests=[dict(type='block', hash=hash)]))
+ # NOTE(review): also fetches type='block' (full block, not headers)
+ # despite the name -- confirm intended.
+ self.get_block_header = p2pool.util.ReplyMatcher(lambda hash: self.send_getdata(requests=[dict(type='block', hash=hash)]))
+
+ if hasattr(self.factory, 'resetDelay'):
+ self.factory.resetDelay()
+ if hasattr(self.factory, 'gotConnection'):
+ self.factory.gotConnection(self)
+
+ def handle_inv(self, invs):
+ # Request the full data for every announced inventory item.
+ for inv in invs:
+ #print 'INV', item['type'], hex(item['hash'])
+ self.send_getdata(requests=[inv])
+
+ def handle_addr(self, addrs):
+ # Peer addresses are currently ignored.
+ for addr in addrs:
+ pass#print 'ADDR', addr
+
+ def handle_reply(self, hash, reply, script):
+ # A reply may answer either a checkorder or a submitorder; both
+ # matchers are offered the response.
+ self.check_order.got_response(hash, dict(reply=reply, script=script))
+ self.submit_order.got_response(hash, dict(reply=reply, script=script))
+
+ def handle_tx(self, tx):
+ #print 'TX', hex(merkle_hash([tx])), tx
+ self.factory.new_tx.happened(tx)
+
+ def handle_block(self, block):
+ # Satisfy any pending get_block request, then broadcast the event.
+ self.get_block.got_response(bitcoin_data.block_hash(block['header']), block)
+ self.factory.new_block.happened(block)
+
+ def handle_ping(self):
+ # Pre-BIP31 ping carries no payload and expects no pong.
+ pass
+
+ def connectionLost(self, reason):
+ if hasattr(self.factory, 'gotConnection'):
+ self.factory.gotConnection(None)
+
+class ClientFactory(protocol.ReconnectingClientFactory):
+ # Keeps one auto-reconnecting connection to bitcoind alive and exposes:
+ # conn -- Variable holding the current Protocol (or None when down);
+ # new_block / new_tx -- event streams fed by the Protocol handlers.
+ protocol = Protocol
+
+ maxDelay = 15 # cap Twisted's exponential reconnect backoff at 15s
+
+ def __init__(self, testnet=False):
+ self.testnet = testnet
+ self.conn = p2pool.util.Variable(None)
+
+ self.new_block = p2pool.util.Event()
+ self.new_tx = p2pool.util.Event()
+
+ def buildProtocol(self, addr):
+ p = self.protocol(self.testnet)
+ p.factory = self
+ return p
+
+ def gotConnection(self, conn):
+ # Called by Protocol on handshake completion (conn) and on loss (None).
+ self.conn.set(conn)
+
+ def getProtocol(self):
+ # Deferred-style accessor: fires once a live connection exists.
+ return self.conn.get_not_none()
+
+if __name__ == '__main__':
+ # Manual smoke test: connect to a local mainnet bitcoind and run forever.
+ factory = ClientFactory()
+ reactor.connectTCP('127.0.0.1', 8333, factory)
+
+ reactor.run()
from __future__ import division
-import bitcoin_p2p
-import conv
+from bitcoin import data as bitcoin_data
-chain_id_type = bitcoin_p2p.ComposedType([
- ('last_p2pool_block_hash', bitcoin_p2p.HashType()),
- ('bits', bitcoin_p2p.StructType('<I')),
+chain_id_type = bitcoin_data.ComposedType([
+ ('last_p2pool_block_hash', bitcoin_data.HashType()),
+ ('bits', bitcoin_data.StructType('<I')),
])
-share_data_type = bitcoin_p2p.ComposedType([
- ('last_p2pool_block_hash', bitcoin_p2p.HashType()),
- ('previous_p2pool_share_hash', bitcoin_p2p.HashType()),
- ('nonce', bitcoin_p2p.StructType('<Q')),
+share_data_type = bitcoin_data.ComposedType([
+ ('last_p2pool_block_hash', bitcoin_data.HashType()),
+ ('previous_p2pool_share_hash', bitcoin_data.HashType()),
+ ('nonce', bitcoin_data.VarStrType()),
])
-coinbase_type = bitcoin_p2p.ComposedType([
- ('identifier', bitcoin_p2p.StructType('<Q')),
+coinbase_type = bitcoin_data.ComposedType([
+ ('identifier', bitcoin_data.StructType('<Q')),
('share_data', share_data_type),
])
-merkle_branch_type = bitcoin_p2p.ListType(bitcoin_p2p.ComposedType([
- ('side', bitcoin_p2p.StructType('<B')),
- ('hash', bitcoin_p2p.HashType()),
+merkle_branch_type = bitcoin_data.ListType(bitcoin_data.ComposedType([
+ ('side', bitcoin_data.StructType('<B')),
+ ('hash', bitcoin_data.HashType()),
]))
-gentx_info_type = bitcoin_p2p.ComposedType([
- ('share_info', bitcoin_p2p.ComposedType([
+gentx_info_type = bitcoin_data.ComposedType([
+ ('share_info', bitcoin_data.ComposedType([
('share_data', share_data_type),
- ('new_script', bitcoin_p2p.VarStrType()),
- ('subsidy', bitcoin_p2p.StructType('<Q')),
+ ('new_script', bitcoin_data.VarStrType()),
+ ('subsidy', bitcoin_data.StructType('<Q')),
])),
('merkle_branch', merkle_branch_type),
])
-share1_type = bitcoin_p2p.ComposedType([
- ('header', bitcoin_p2p.block_header_type),
+share1_type = bitcoin_data.ComposedType([
+ ('header', bitcoin_data.block_header_type),
('gentx_info', gentx_info_type),
])
-def calculate_merkle_branch(tx_list, index):
- hash_list = [(bitcoin_p2p.doublesha(bitcoin_p2p.tx_type.pack(data)), i == index, []) for i, data in enumerate(tx_list)]
+def calculate_merkle_branch(txs, index):
+ hash_list = [(bitcoin_data.tx_hash(tx), i == index, []) for i, tx in enumerate(txs)]
while len(hash_list) > 1:
hash_list = [
(
- bitcoin_p2p.doublesha(bitcoin_p2p.merkle_record_type.pack(dict(left=left, right=right))),
+ bitcoin_data.doublesha(bitcoin_data.merkle_record_type.pack(dict(left=left, right=right))),
left_f or right_f,
(left_l if left_f else right_l) + [dict(side=1, hash=right) if left_f else dict(side=0, hash=left)],
)
]
assert hash_list[0][1]
- assert check_merkle_branch(tx_list[index], hash_list[0][2]) == hash_list[0][0]
+ assert check_merkle_branch(txs[index], hash_list[0][2]) == hash_list[0][0]
return hash_list[0][2]
def check_merkle_branch(tx, branch):
- hash_ = bitcoin_p2p.doublesha(bitcoin_p2p.tx_type.pack(tx))
+ hash_ = bitcoin_data.tx_hash(tx)
for step in branch:
if not step['side']:
- hash_ = bitcoin_p2p.doublesha(bitcoin_p2p.merkle_record_type.pack(dict(left=step['hash'], right=hash_)))
+ hash_ = bitcoin_data.doublesha(bitcoin_data.merkle_record_type.pack(dict(left=step['hash'], right=hash_)))
else:
- hash_ = bitcoin_p2p.doublesha(bitcoin_p2p.merkle_record_type.pack(dict(left=hash_, right=step['hash'])))
+ hash_ = bitcoin_data.doublesha(bitcoin_data.merkle_record_type.pack(dict(left=hash_, right=step['hash'])))
return hash_
def txs_to_gentx_info(txs):
class Share(object):
def __init__(self, header, txs=None, gentx_info=None):
if txs is not None:
- if bitcoin_p2p.merkle_hash(txs) != header['merkle_root']:
+ if bitcoin_data.merkle_hash(txs) != header['merkle_root']:
raise ValueError("txs don't match header")
if gentx_info is None:
self.header = header
self.txs = txs
self.gentx_info = gentx_info
- self.hash = bitcoin_p2p.block_hash(header)
+ self.hash = bitcoin_data.block_hash(header)
self.previous_share_hash = coinbase['previous_p2pool_share_hash'] if coinbase['previous_p2pool_share_hash'] != 2**256 - 1 else None
self.chain_id_data = chain_id_type.pack(dict(last_p2pool_block_hash=coinbase['last_p2pool_block_hash'], bits=header['bits']))
def check(self, chain, height, previous_share2, net):
if self.chain_id_data != chain.chain_id_data:
raise ValueError('wrong chain')
- if self.hash > net.TARGET_MULTIPLIER*conv.bits_to_target(self.header['bits']):
+
+ if self.hash > net.TARGET_MULTIPLIER*bitcoin_data.bits_to_target(self.header['bits']):
raise ValueError('not enough work!')
gentx, shares, merkle_root = gentx_info_to_gentx_shares_and_merkle_root(self.gentx_info, chain, net)
def __init__(self, share, shares, height):
self.share = share
- self.shares = map(intern, shares)
+ self.shares = shares
self.height = height
+
self.shared = False
def flag_shared(self):
amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy//64 # prevent fake previous p2pool blocks
amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy - sum(amounts.itervalues()) # collect any extra
- dests = sorted(amounts.iterkeys())
- dests.remove(new_script)
- dests = dests + [new_script]
+ dests = sorted(amounts.iterkeys(), key=lambda script: (script == new_script, script))
+ assert dests[-1] == new_script
return dict(
version=1,
), shares
+class Tracker(object):
+ # Tracks shares by hash and maintains the set of chain heads: shares
+ # that no other known share builds on.
+ def __init__(self):
+ self.shares = {} # hash -> share
+ self.reverse_shares = {} # previous_hash -> set of share hashes
+ # NOTE(review): dead assignment -- immediately overwritten below, and
+ # its comment describes a mapping that no longer exists.
+ self.heads = {} # hash -> (height, tail hash)
+ self.heads = set() # hashes of shares with no known children
+
+ # NOTE(review): reads share.previous_hash, but the Share class elsewhere
+ # in this patch exposes previous_share_hash -- confirm attribute name.
+ def add_share(self, share):
+ if share.hash in self.shares:
+ return # XXX
+
+ self.shares[share.hash] = share
+ self.reverse_shares.setdefault(share.previous_hash, set()).add(share.hash)
+
+ # A share with known children is not a head; otherwise it becomes
+ # one, displacing its parent from the head set.
+ if self.reverse_shares.get(share.hash, set()):
+ pass # not a head
+ else:
+ self.heads.add(share.hash)
+ if share.previous_hash in self.heads:
+ self.heads.remove(share.previous_hash)
+
+if __name__ == '__main__':
+ # Minimal manual test: stub share objects exercising head tracking.
+ class FakeShare(object):
+ def __init__(self, hash, previous_hash):
+ self.hash = hash
+ self.previous_hash = previous_hash
+
+ t = Tracker()
+
+ t.add_share(FakeShare(1, 2))
+ print t.heads
+ t.add_share(FakeShare(4, 0))
+ print t.heads
+ t.add_share(FakeShare(3, 4))
+ print t.heads
+
+
# TARGET_MULTIPLIER needs to be less than the current difficulty to prevent miner clients from missing shares
class Testnet(object):
ROOT_BLOCK = 0xd5070cd4f2987ad2191af71393731a2b143f094f7b84c9e6aa9a6a
SCRIPT = '410403ad3dee8ab3d8a9ce5dd2abfbe7364ccd9413df1d279bf1a207849310465b0956e5904b1155ecd17574778f9949589ebfd4fb33ce837c241474a225cf08d85dac'.decode('hex')
IDENTIFIER = 0x1ae3479e4eb6700a
- PREFIX= 'd19778c812754854'.decode('hex')
+ PREFIX = 'd19778c812754854'.decode('hex')
ADDRS_TABLE = 'addrs_testnet'
class Main(object):
from twisted.internet import defer, reactor
from twisted.web import server
-import bitcoin_p2p
-import conv
+import bitcoin.p2p, bitcoin.getwork, bitcoin.data
import db
import expiring_dict
import jsonrpc
import p2p
-import p2pool
+import p2pool.data as p2pool
import util
import worker_interface
+try:
+ # Best-effort version string from the svn working copy; the bare except
+ # deliberately swallows any failure (no svn, frozen build, ...).
+ __version__ = subprocess.Popen(['svnversion', os.path.dirname(sys.argv[0])], stdout=subprocess.PIPE).stdout.read().strip()
+except:
+ __version__ = 'unknown'
+
+if hasattr(sys, "frozen"):
+ # Frozen (py2exe-style) builds: point __file__ at the executable --
+ # presumably for locating data files relative to it; TODO confirm.
+ __file__ = sys.executable
+
+
class Chain(object):
def __init__(self, chain_id_data):
self.chain_id_data = chain_id_data
continue
coinbase_data = block['txs'][0]['tx_ins'][0]['script']
try:
- coinbase = p2pool.coinbase_type.unpack(coinbase_data, ignore_extra=True)
- except bitcoin_p2p.EarlyEnd:
+ coinbase = p2pool.coinbase_type.unpack(coinbase_data)
+ except bitcoin.data.EarlyEnd:
pass
else:
try:
def getwork(bitcoind):
while True:
try:
+ # a block could arrive in between these two queries
getwork_df, height_df = bitcoind.rpc_getwork(), bitcoind.rpc_getblocknumber()
- getwork, height = conv.BlockAttempt.from_getwork((yield getwork_df)), (yield height_df)
+ try:
+ getwork, height = bitcoin.getwork.BlockAttempt.from_getwork((yield getwork_df)), (yield height_df)
+ finally:
+ # get rid of residual errors
+ getwork_df.addErrback(lambda fail: None)
+ height_df.addErrback(lambda fail: None)
except:
print
print 'Error getting work from bitcoind:'
traceback.print_exc()
print
+
+
yield util.sleep(1)
+
continue
defer.returnValue((getwork, height))
try:
net = p2pool.Testnet if args.testnet else p2pool.Main
- print name
+ print 'p2pool (version %s)' % (__version__,)
print
# connect to bitcoind over JSON-RPC and do initial getwork
# connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
print "Testing bitcoind P2P connection to '%s:%s'..." % (args.bitcoind_address, args.bitcoind_p2p_port)
- factory = bitcoin_p2p.ClientFactory(args.testnet)
+ factory = bitcoin.p2p.ClientFactory(args.testnet)
reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
while True:
try:
- res = yield (yield factory.getProtocol()).check_order(order=bitcoin_p2p.Protocol.null_order)
+ res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
if res['reply'] != 'success':
print
print 'Error getting payout script:'
if res == 'good':
share2 = chain.share2s[share.hash]
- hash_data = bitcoin_p2p.HashType().pack(share.hash)
- share1_data = p2pool.share1.pack(share.as_share1())
- for share_db in share_dbs:
- share_db[hash_data] = share1_data
+ def save():
+ hash_data = bitcoin.p2p.HashType().pack(share.hash)
+ share1_data = p2pool.share1.pack(share.as_share1())
+ for share_db in share_dbs:
+ share_db[hash_data] = share1_data
+ reactor.callLater(1, save)
if chain is current_work.value['current_chain']:
if share.hash == chain.highest.value:
previous_share2=state['highest_p2pool_share2'],
new_script=my_script,
subsidy=(50*100000000 >> state['height']//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
- nonce=random.randrange(2**64),
+ nonce=struct.pack("<Q", random.randrange(2**64)),
net=net,
)
print 'Generating, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
transactions = [generate_tx] + [tx.tx for tx in extra_txs]
- merkle_root = bitcoin_p2p.merkle_hash(transactions)
+ merkle_root = bitcoin.p2p.merkle_hash(transactions)
merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
ba = conv.BlockAttempt(state['version'], state['previous_block'], merkle_root, current_work2.value['timestamp'], state['bits'])
return ba.getwork(net.TARGET_MULTIPLIER)
class Tx(object):
def __init__(self, tx, seen_at_block):
- self.hash = bitcoin_p2p.tx_hash(tx)
+ self.hash = bitcoin.data.tx_hash(tx)
self.tx = tx
self.seen_at_block = seen_at_block
- self.mentions = set([bitcoin_p2p.tx_hash(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
+ self.mentions = set([bitcoin.data.tx_hash(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
#print
#print "%x %r" % (seen_at_block, tx)
#for mention in self.mentions:
if block_hash == self.seen_at_block:
return True
for tx in block['txs']:
- mentions = set([bitcoin_p2p.tx_hash(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
+ mentions = set([bitcoin.data.tx_hash(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
if mentions & self.mentions:
return False
return False
def new_tx(tx):
- seen_at_block = current_work.value['previous_block']
- tx_pool[bitcoin_p2p.tx_hash(tx)] = Tx(tx, seen_at_block)
+ tx_pool[bitcoin.data.tx_hash(tx)] = Tx(tx, current_work.value['previous_block'])
factory.new_tx.watch(new_tx)
- # disabled for now - txs can rely on past txs that are not yet in the block chain
- # bitcoin passes those txs along
- # if a p2pool program was started in between the tx-not-yet-included and the tx-depending-on-that-one
- # it would need to find the tx-not-yet-included
- # in fact, it has no way to know if a tx is included...
- # p2pool has to be able to access the entire blockchain
- # possibilities
- # access bitcoin's data files
- # include a few of the parent txs
- # patch bitcoind to find the block that includes a given tx hash
+
+ def new_block(block):
+ set_real_work()
+ factory.new_block.watch(new_block)
print 'Started successfully!'
print
-
- while True:
- yield set_real_work()
- yield util.sleep(.5)
except:
print
print 'Fatal error:'
traceback.print_exc()
print
- reactor.stop()
+ reactor.stop()
-if __name__ == '__main__':
- try:
- __version__ = subprocess.Popen(['svnversion', os.path.dirname(sys.argv[0])], stdout=subprocess.PIPE).stdout.read().strip()
- except:
- __version__ = 'unknown'
-
- name = 'p2pool (version %s)' % (__version__,)
-
- parser = argparse.ArgumentParser(description=name)
+def run():
+ parser = argparse.ArgumentParser(description='p2pool (version %s)' % (__version__,))
parser.add_argument('--version', action='version', version=__version__)
- parser.add_argument('-t', '--testnet',
+ parser.add_argument('--testnet',
help='use the testnet; make sure you change the ports too',
action='store_true', default=False, dest='testnet')
- parser.add_argument('-s', '--store-shares', metavar='FILENAME',
+ parser.add_argument('--store-shares', metavar='FILENAME',
help='write shares to a database (not needed for normal usage)',
type=str, action='append', default=[], dest='store_shares')
p2pool_group = parser.add_argument_group('p2pool interface')
- p2pool_group.add_argument('-p', '--p2pool-port', metavar='PORT',
+ p2pool_group.add_argument('--p2pool-port', metavar='PORT',
help='use TCP port PORT to listen for connections (default: 9333 normally, 19333 for testnet) (forward this port from your router!)',
type=int, action='store', default=None, dest='p2pool_port')
p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
args = parser.parse_args()
- if hasattr(sys, "frozen"):
- __file__ = sys.executable
-
if args.bitcoind_p2p_port is None:
args.bitcoind_p2p_port = {False: 8333, True: 18333}[args.testnet]
from twisted.internet import defer, protocol, reactor
-import bitcoin_p2p
-import conv
-import p2pool
+from bitcoin import p2p as bitcoin_p2p
+from bitcoin import data as bitcoin_data
+from p2pool import data as p2pool_data
import util
# mode
use_checksum = True
- message_types = {
- 'version': bitcoin_p2p.ComposedType([
- ('version', bitcoin_p2p.StructType('<I')),
- ('services', bitcoin_p2p.StructType('<Q')),
- ('addr_to', bitcoin_p2p.address),
- ('addr_from', bitcoin_p2p.address),
- ('nonce', bitcoin_p2p.StructType('<Q')),
- ('sub_version', bitcoin_p2p.VarStrType()),
- ('mode', bitcoin_p2p.StructType('<I')),
- ('state', bitcoin_p2p.ComposedType([
- ('chain_id', p2pool.chain_id_type),
- ('highest', bitcoin_p2p.ComposedType([
- ('hash', bitcoin_p2p.HashType()),
- ('height', bitcoin_p2p.StructType('<Q')),
- ])),
+ message_version = bitcoin_data.ComposedType([
+ ('version', bitcoin_data.StructType('<I')),
+ ('services', bitcoin_data.StructType('<Q')),
+ ('addr_to', bitcoin_data.address_type),
+ ('addr_from', bitcoin_data.address_type),
+ ('nonce', bitcoin_data.StructType('<Q')),
+ ('sub_version', bitcoin_data.VarStrType()),
+ ('mode', bitcoin_data.StructType('<I')),
+ ('state', bitcoin_data.ComposedType([
+ ('chain_id', p2pool_data.chain_id_type),
+ ('highest', bitcoin_data.ComposedType([
+ ('hash', bitcoin_data.HashType()),
+ ('height', bitcoin_data.StructType('<Q')),
])),
- ]),
-
- 'update_mode': bitcoin_p2p.ComposedType([
- ('mode', bitcoin_p2p.StructType('<I')),
- ]),
-
- 'ping': bitcoin_p2p.ComposedType([]),
-
- 'addrme': bitcoin_p2p.ComposedType([
- ('port', bitcoin_p2p.StructType('<H')),
- ]),
- 'addrs': bitcoin_p2p.ComposedType([
- ('addrs', bitcoin_p2p.ListType(bitcoin_p2p.ComposedType([
- ('timestamp', bitcoin_p2p.StructType('<Q')),
- ('address', bitcoin_p2p.address),
- ]))),
- ]),
- 'getaddrs': bitcoin_p2p.ComposedType([
- ('count', bitcoin_p2p.StructType('<I')),
- ]),
-
- 'gettobest': bitcoin_p2p.ComposedType([
- ('chain_id', p2pool.chain_id_type),
- ('have', bitcoin_p2p.ListType(bitcoin_p2p.HashType())),
- ]),
- 'getshares': bitcoin_p2p.ComposedType([
- ('chain_id', p2pool.chain_id_type),
- ('hashes', bitcoin_p2p.ListType(bitcoin_p2p.HashType())),
- ]),
-
- 'share0s': bitcoin_p2p.ComposedType([
- ('chains', bitcoin_p2p.ListType(bitcoin_p2p.ComposedType([
- ('chain_id', p2pool.chain_id_type),
- ('hashes', bitcoin_p2p.ListType(bitcoin_p2p.HashType())),
- ]))),
- ]),
- 'share1s': bitcoin_p2p.ComposedType([
- ('share1s', bitcoin_p2p.ListType(p2pool.share1)),
- ]),
- 'share2s': bitcoin_p2p.ComposedType([
- ('share2s', bitcoin_p2p.ListType(bitcoin_p2p.block)),
- ]),
- }
+ ])),
+ ])
+
+ message_update_mode = bitcoin_data.ComposedType([
+ ('mode', bitcoin_data.StructType('<I')),
+ ])
+
+ message_ping = bitcoin_data.ComposedType([])
+
+ message_addrme = bitcoin_data.ComposedType([
+ ('port', bitcoin_data.StructType('<H')),
+ ])
+ message_addrs = bitcoin_data.ComposedType([
+ ('addrs', bitcoin_data.ListType(bitcoin_data.ComposedType([
+ ('timestamp', bitcoin_data.StructType('<Q')),
+ ('address', bitcoin_data.address_type),
+ ]))),
+ ])
+ message_getaddrs = bitcoin_data.ComposedType([
+ ('count', bitcoin_data.StructType('<I')),
+ ])
+
+ message_gettobest = bitcoin_data.ComposedType([
+ ('chain_id', p2pool_data.chain_id_type),
+ ('have', bitcoin_data.ListType(bitcoin_data.HashType())),
+ ])
+ message_getshares = bitcoin_data.ComposedType([
+ ('chain_id', p2pool_data.chain_id_type),
+ ('hashes', bitcoin_data.ListType(bitcoin_data.HashType())),
+ ])
+
+ message_share0s = bitcoin_data.ComposedType([
+ ('chains', bitcoin_data.ListType(bitcoin_data.ComposedType([
+ ('chain_id', p2pool_data.chain_id_type),
+ ('hashes', bitcoin_data.ListType(bitcoin_data.HashType())),
+ ]))),
+ ])
+ message_share1s = bitcoin_data.ComposedType([
+ ('share1s', bitcoin_data.ListType(p2pool_data.share1_type)),
+ ])
+ message_share2s = bitcoin_data.ComposedType([
+ ('share2s', bitcoin_data.ListType(bitcoin_data.block_type)),
+ ])
other_version = None
node_var_watch = None
sub_version=self.sub_version,
mode=self.node.mode_var.value,
state=dict(
- chain_id=p2pool.chain_id_type.unpack(chain.chain_id_data),
+ chain_id=p2pool_data.chain_id_type.unpack(chain.chain_id_data),
highest=dict(
hash=highest_share2.share.hash if highest_share2 is not None else 2**256-1,
height=highest_share2.height if highest_share2 is not None else 0,
])
def handle_gettobest(self, chain_id, have):
- self.node.handle_get_to_best(p2pool.chain_id_type.pack(chain_id), have, self)
+ self.node.handle_get_to_best(p2pool_data.chain_id_type.pack(chain_id), have, self)
def handle_getshares(self, chain_id, hashes):
- self.node.handle_get_shares(p2pool.chain_id_type.pack(chain_id), hashes, self)
+ self.node.handle_get_shares(p2pool_data.chain_id_type.pack(chain_id), hashes, self)
def handle_share0s(self, chains):
for chain in chains:
for hash_ in chain['hashes']:
- self.node.handle_share_hash(p2pool.chain_id_type.pack(chain['chain_id']), hash_, self)
+ self.node.handle_share_hash(p2pool_data.chain_id_type.pack(chain['chain_id']), hash_, self)
def handle_share1s(self, share1s):
for share1 in share1s:
- hash_ = bitcoin_p2p.block_hash(share1['header'])
- if hash_ <= conv.bits_to_target(share1['header']['bits']):
+ hash_ = bitcoin_data.block_hash(share1['header'])
+ if hash_ <= bitcoin_data.bits_to_target(share1['header']['bits']):
print 'Dropping peer %s:%i due to invalid share' % (self.transport.getPeer().host, self.transport.getPeer().port)
self.transport.loseConnection()
return
- share = p2pool.Share(share1['header'], gentx=share1['gentx'])
+ share = p2pool_data.Share(share1['header'], gentx=share1['gentx'])
self.node.handle_share(share, self)
def handle_share2s(self, share2s):
for share2 in share2s:
- hash_ = bitcoin_p2p.block_hash(share2['header'])
- if not hash_ <= conv.bits_to_target(share2['header']['bits']):
+ hash_ = bitcoin_data.block_hash(share2['header'])
+ if not hash_ <= bitcoin_data.bits_to_target(share2['header']['bits']):
print 'Dropping peer %s:%i due to invalid share' % (self.transport.getPeer().host, self.transport.getPeer().port)
self.transport.loseConnection()
return
- share = p2pool.Share(share2['header'], txs=share2['txs'])
+ share = p2pool_data.Share(share2['header'], txs=share2['txs'])
self.node.handle_share(share, self)
def send_share(self, share, full=False):
- if share.hash <= conv.bits_to_target(share.header['bits']):
+ if share.hash <= bitcoin_data.bits_to_target(share.header['bits']):
self.send_share2s(share2s=[share.as_block()])
else:
if self.mode == 0 and not full:
self.send_share0s(chains=[dict(
- chain_id=p2pool.chain_id_type.unpack(share.chain_id_data),
+ chain_id=p2pool_data.chain_id_type.unpack(share.chain_id_data),
hashes=[share.hash],
)])
elif self.mode == 1 or full:
def clientConnectionLost(self, connector, reason):
self.node.attempt_ended(connector)
-addrdb_key = bitcoin_p2p.ComposedType([
- ('address', bitcoin_p2p.IPV6AddressType()),
- ('port', bitcoin_p2p.StructType('>H')),
+addrdb_key = bitcoin_data.ComposedType([
+ ('address', bitcoin_data.IPV6AddressType()),
+ ('port', bitcoin_data.StructType('>H')),
])
-addrdb_value = bitcoin_p2p.ComposedType([
- ('services', bitcoin_p2p.StructType('<Q')),
- ('first_seen', bitcoin_p2p.StructType('<Q')),
- ('last_seen', bitcoin_p2p.StructType('<Q')),
+addrdb_value = bitcoin_data.ComposedType([
+ ('services', bitcoin_data.StructType('<Q')),
+ ('first_seen', bitcoin_data.StructType('<Q')),
+ ('last_seen', bitcoin_data.StructType('<Q')),
])
class AddrStore(util.DictWrapper):
--- /dev/null
+from p2pool import main
+
+main.run()
author_email='forrest@forre.st',
url='http://p2pool.forre.st/',
- console=['main.py'],
+ console=['run_p2pool.py'],
)