Merge branch 'remove_low_bandwidth' into cruftless
author Forrest Voight <forrest@forre.st>
Sat, 3 Dec 2011 23:34:35 +0000 (18:34 -0500)
committer Forrest Voight <forrest@forre.st>
Sat, 3 Dec 2011 23:34:35 +0000 (18:34 -0500)
Conflicts:
p2pool/main.py
p2pool/p2p.py

17 files changed:
COPYING [new file with mode: 0644]
p2pool/bitcoin/data.py
p2pool/bitcoin/i0coin.py [deleted file]
p2pool/bitcoin/ixcoin.py [deleted file]
p2pool/bitcoin/litecoin.py [deleted file]
p2pool/bitcoin/namecoin.py [deleted file]
p2pool/bitcoin/networks.py [new file with mode: 0644]
p2pool/bitcoin/p2p.py
p2pool/bitcoin/solidcoin.py [deleted file]
p2pool/bitcoin/worker_interface.py [moved from p2pool/worker_interface.py with 66% similarity]
p2pool/data.py
p2pool/main.py
p2pool/p2p.py
p2pool/skiplists.py
p2pool/util/jsonrpc.py
p2pool/util/math.py
p2pool/util/variable.py

diff --git a/COPYING b/COPYING
new file mode 100644
index 0000000..94a9ed0
--- /dev/null
+++ b/COPYING
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/p2pool/bitcoin/data.py b/p2pool/bitcoin/data.py
index e0c1d9a..6b81aac 100644
@@ -105,13 +105,10 @@ class Type(object):
     
     def hash256(self, obj):
         return HashType().unpack(hashlib.sha256(hashlib.sha256(self.pack(obj)).digest()).digest())
-
-    ltc_scrypt = None
+    
     def scrypt(self, obj):
-        # dynamically import ltc_scrypt so you will only get an error on runtime
-        if (not self.ltc_scrypt):
-            self.ltc_scrypt = __import__('ltc_scrypt')
-        return HashType().unpack(self.ltc_scrypt.getPoWHash(self.pack(obj)))
+        import ltc_scrypt
+        return HashType().unpack(ltc_scrypt.getPoWHash(self.pack(obj)))
 
 class VarIntType(Type):
     # redundancy doesn't matter here because bitcoin and p2pool both reencode before hashing
@@ -355,7 +352,7 @@ class FloatingInteger(object):
         elif isinstance(other, (int, long)):
             return cmp(self._value, other)
         else:
-            raise NotImplementedError()
+            raise NotImplementedError(other)
     
     def __int__(self):
         return self._value
@@ -439,10 +436,12 @@ tx_type = ComposedType([
     ('lock_time', StructType('<I')),
 ])
 
+merkle_branch_type = ListType(HashType())
+
 merkle_tx_type = ComposedType([
     ('tx', tx_type),
     ('block_hash', HashType()),
-    ('merkle_branch', ListType(HashType())),
+    ('merkle_branch', merkle_branch_type),
     ('index', StructType('<i')),
 ])
 
@@ -462,7 +461,7 @@ block_type = ComposedType([
 
 aux_pow_type = ComposedType([
     ('merkle_tx', merkle_tx_type),
-    ('merkle_branch', ListType(HashType())),
+    ('merkle_branch', merkle_branch_type),
     ('index', StructType('<i')),
     ('parent_block_header', block_header_type),
 ])
@@ -482,9 +481,42 @@ def merkle_hash(tx_list):
             for left, right in zip(hash_list[::2], hash_list[1::2] + [None])]
     return hash_list[0]
 
+def calculate_merkle_branch(txs, index):
+    # XXX optimize this
+    
+    hash_list = [(tx_type.hash256(tx), i == index, []) for i, tx in enumerate(txs)]
+    
+    while len(hash_list) > 1:
+        hash_list = [
+            (
+                merkle_record_type.hash256(dict(left=left, right=right)),
+                left_f or right_f,
+                (left_l if left_f else right_l) + [dict(side=1, hash=right) if left_f else dict(side=0, hash=left)],
+            )
+            for (left, left_f, left_l), (right, right_f, right_l) in
+                zip(hash_list[::2], hash_list[1::2] + [hash_list[::2][-1]])
+        ]
+    
+    res = [x['hash'] for x in hash_list[0][2]]
+    
+    assert hash_list[0][1]
+    assert check_merkle_branch(txs[index], index, res) == hash_list[0][0]
+    assert index == sum(k*2**i for i, k in enumerate([1-x['side'] for x in hash_list[0][2]]))
+    
+    return res
+
+def check_merkle_branch(tx, index, merkle_branch):
+    return reduce(lambda c, (i, h): merkle_record_type.hash256(
+        dict(left=h, right=c) if 2**i & index else
+        dict(left=c, right=h)
+    ), enumerate(merkle_branch), tx_type.hash256(tx))
+
 def target_to_average_attempts(target):
     return 2**256//(target + 1)
 
+def target_to_difficulty(target):
+    return (0xffff0000 * 2**(256-64) + 1)/(target + 1)
+
 # tx
 
 def tx_get_sigop_count(tx):
@@ -841,31 +873,3 @@ if __name__ == '__main__':
             for a in x[1]:
                 print str(a).rjust(10),
         print
-
-# network definitions
-
-class Mainnet(object):
-    BITCOIN_P2P_PREFIX = 'f9beb4d9'.decode('hex')
-    BITCOIN_P2P_PORT = 8333
-    BITCOIN_ADDRESS_VERSION = 0
-    BITCOIN_RPC_PORT = 8332
-    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
-        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
-        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
-        not (yield bitcoind.rpc_getinfo())['testnet']
-    )))
-    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//210000)
-    BITCOIN_SYMBOL = 'BTC'
-
-class Testnet(object):
-    BITCOIN_P2P_PREFIX = 'fabfb5da'.decode('hex')
-    BITCOIN_P2P_PORT = 18333
-    BITCOIN_ADDRESS_VERSION = 111
-    BITCOIN_RPC_PORT = 8332
-    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
-        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
-        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
-        (yield bitcoind.rpc_getinfo())['testnet']
-    )))
-    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//210000)
-    BITCOIN_SYMBOL = 'tBTC'
diff --git a/p2pool/bitcoin/i0coin.py b/p2pool/bitcoin/i0coin.py
deleted file mode 100644
index 03441d8..0000000
+++ /dev/null
@@ -1,29 +0,0 @@
-from twisted.internet import defer
-
-class Mainnet(object):
-    BITCOIN_P2P_PREFIX = 'f1b2b3d4'.decode('hex')
-    BITCOIN_P2P_PORT = 7333
-    BITCOIN_ADDRESS_VERSION = 105
-    BITCOIN_RPC_PORT = 7332
-    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
-        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
-        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
-        'i0coinaddress' in (yield bitcoind.rpc_help()) and
-        not (yield bitcoind.rpc_getinfo())['testnet']
-    )))
-    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 48*100000000 >> (height + 1)//218750)
-    BITCOIN_SYMBOL = 'I0C'
-
-class Testnet(object):
-    BITCOIN_P2P_PREFIX = 'f5b6b7d8'.decode('hex')
-    BITCOIN_P2P_PORT = 17333
-    BITCOIN_ADDRESS_VERSION = 112
-    BITCOIN_RPC_PORT = 7332
-    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
-        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
-        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
-        'i0coinaddress' in (yield bitcoind.rpc_help()) and
-        (yield bitcoind.rpc_getinfo())['testnet']
-    )))
-    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 48*100000000 >> (height + 1)//218750)
-    BITCOIN_SYMBOL = 'tI0C'
diff --git a/p2pool/bitcoin/ixcoin.py b/p2pool/bitcoin/ixcoin.py
deleted file mode 100644
index 4aa1b46..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-from twisted.internet import defer
-
-class Mainnet(object):
-    BITCOIN_P2P_PREFIX = 'f9beb4d9'.decode('hex')
-    BITCOIN_P2P_PORT = 8337
-    BITCOIN_ADDRESS_VERSION = 138
-    BITCOIN_RPC_PORT = 8338
-    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
-        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
-        'ixcoinaddress' in (yield bitcoind.rpc_help()) and
-        not (yield bitcoind.rpc_getinfo())['testnet']
-    )))
-    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 96*100000000 >> (height + 1)//210000)
-    BITCOIN_SYMBOL = 'IXC'
-
-class Testnet(object):
-    BITCOIN_P2P_PREFIX = 'fabfb5da'.decode('hex')
-    BITCOIN_P2P_PORT = 18337
-    BITCOIN_ADDRESS_VERSION = 111
-    BITCOIN_RPC_PORT = 8338
-    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
-        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
-        'ixcoinaddress' in (yield bitcoind.rpc_help()) and
-        (yield bitcoind.rpc_getinfo())['testnet']
-    )))
-    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 96*100000000 >> (height + 1)//210000)
-    BITCOIN_SYMBOL = 'tIXC'
diff --git a/p2pool/bitcoin/litecoin.py b/p2pool/bitcoin/litecoin.py
deleted file mode 100644
index b83da0a..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-from twisted.internet import defer
-
-class Mainnet(object):
-    BITCOIN_P2P_PREFIX = 'fbc0b6db'.decode('hex')
-    BITCOIN_P2P_PORT = 9333
-    BITCOIN_ADDRESS_VERSION = 48
-    BITCOIN_RPC_PORT = 9332
-    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
-        'litecoinaddress' in (yield bitcoind.rpc_help()) and
-        not (yield bitcoind.rpc_getinfo())['testnet']
-    )))
-    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//840000)
-    BITCOIN_POW_SCRYPT = True;
-    BITCOIN_SYMBOL = 'LTC'
-
-class Testnet(object):
-    BITCOIN_P2P_PREFIX = 'fcc1b7dc'.decode('hex')
-    BITCOIN_P2P_PORT = 19333
-    BITCOIN_ADDRESS_VERSION = 111
-    BITCOIN_RPC_PORT = 19332
-    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
-        'litecoinaddress' in (yield bitcoind.rpc_help()) and
-        (yield bitcoind.rpc_getinfo())['testnet']
-    )))
-    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//840000)
-    BITCOIN_POW_SCRYPT = True;
-    BITCOIN_SYMBOL = 'tLTC'
diff --git a/p2pool/bitcoin/namecoin.py b/p2pool/bitcoin/namecoin.py
deleted file mode 100644
index bdead37..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-from twisted.internet import defer
-
-class Mainnet(object):
-    BITCOIN_P2P_PREFIX = 'f9beb4fe'.decode('hex')
-    BITCOIN_P2P_PORT = 8334
-    BITCOIN_ADDRESS_VERSION = 52
-    BITCOIN_RPC_PORT = 8332
-    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
-        'name_firstupdate' in (yield bitcoind.rpc_help()) and
-        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
-        not (yield bitcoind.rpc_getinfo())['testnet']
-    )))
-    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//210000)
-    BITCOIN_SYMBOL = 'NMC'
-
-class Testnet(object):
-    BITCOIN_P2P_PREFIX = 'fabfb5fe'.decode('hex')
-    BITCOIN_P2P_PORT = 18334
-    BITCOIN_ADDRESS_VERSION = 111
-    BITCOIN_RPC_PORT = 8332
-    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
-        'name_firstupdate' in (yield bitcoind.rpc_help()) and
-        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
-        (yield bitcoind.rpc_getinfo())['testnet']
-    )))
-    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//210000)
-    BITCOIN_SYMBOL = 'tNMC'
diff --git a/p2pool/bitcoin/networks.py b/p2pool/bitcoin/networks.py
new file mode 100644
index 0000000..7f9d358
--- /dev/null
@@ -0,0 +1,162 @@
+from twisted.internet import defer
+
+from . import data
+
+
+class BitcoinMainnet(object):
+    BITCOIN_P2P_PREFIX = 'f9beb4d9'.decode('hex')
+    BITCOIN_P2P_PORT = 8333
+    BITCOIN_ADDRESS_VERSION = 0
+    BITCOIN_RPC_PORT = 8332
+    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
+        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
+        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
+        not (yield bitcoind.rpc_getinfo())['testnet']
+    )))
+    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//210000)
+    BITCOIN_POW_FUNC = data.block_header_type.hash256
+    BITCOIN_SYMBOL = 'BTC'
+
+class BitcoinTestnet(object):
+    BITCOIN_P2P_PREFIX = 'fabfb5da'.decode('hex')
+    BITCOIN_P2P_PORT = 18333
+    BITCOIN_ADDRESS_VERSION = 111
+    BITCOIN_RPC_PORT = 8332
+    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
+        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
+        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
+        (yield bitcoind.rpc_getinfo())['testnet']
+    )))
+    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//210000)
+    BITCOIN_POW_FUNC = data.block_header_type.hash256
+    BITCOIN_SYMBOL = 'tBTC'
+
+
+class NamecoinMainnet(object):
+    BITCOIN_P2P_PREFIX = 'f9beb4fe'.decode('hex')
+    BITCOIN_P2P_PORT = 8334
+    BITCOIN_ADDRESS_VERSION = 52
+    BITCOIN_RPC_PORT = 8332
+    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
+        'name_firstupdate' in (yield bitcoind.rpc_help()) and
+        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
+        not (yield bitcoind.rpc_getinfo())['testnet']
+    )))
+    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//210000)
+    BITCOIN_POW_FUNC = data.block_header_type.hash256
+    BITCOIN_SYMBOL = 'NMC'
+
+class NamecoinTestnet(object):
+    BITCOIN_P2P_PREFIX = 'fabfb5fe'.decode('hex')
+    BITCOIN_P2P_PORT = 18334
+    BITCOIN_ADDRESS_VERSION = 111
+    BITCOIN_RPC_PORT = 8332
+    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
+        'name_firstupdate' in (yield bitcoind.rpc_help()) and
+        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
+        (yield bitcoind.rpc_getinfo())['testnet']
+    )))
+    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//210000)
+    BITCOIN_POW_FUNC = data.block_header_type.hash256
+    BITCOIN_SYMBOL = 'tNMC'
+
+
+class IxcoinMainnet(object):
+    BITCOIN_P2P_PREFIX = 'f9beb4d9'.decode('hex')
+    BITCOIN_P2P_PORT = 8337
+    BITCOIN_ADDRESS_VERSION = 138
+    BITCOIN_RPC_PORT = 8338
+    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
+        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
+        'ixcoinaddress' in (yield bitcoind.rpc_help()) and
+        not (yield bitcoind.rpc_getinfo())['testnet']
+    )))
+    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 96*100000000 >> (height + 1)//210000)
+    BITCOIN_POW_FUNC = data.block_header_type.hash256
+    BITCOIN_SYMBOL = 'IXC'
+
+class IxcoinTestnet(object):
+    BITCOIN_P2P_PREFIX = 'fabfb5da'.decode('hex')
+    BITCOIN_P2P_PORT = 18337
+    BITCOIN_ADDRESS_VERSION = 111
+    BITCOIN_RPC_PORT = 8338
+    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
+        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
+        'ixcoinaddress' in (yield bitcoind.rpc_help()) and
+        (yield bitcoind.rpc_getinfo())['testnet']
+    )))
+    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 96*100000000 >> (height + 1)//210000)
+    BITCOIN_POW_FUNC = data.block_header_type.hash256
+    BITCOIN_SYMBOL = 'tIXC'
+
+
+class I0coinMainnet(object):
+    BITCOIN_P2P_PREFIX = 'f1b2b3d4'.decode('hex')
+    BITCOIN_P2P_PORT = 7333
+    BITCOIN_ADDRESS_VERSION = 105
+    BITCOIN_RPC_PORT = 7332
+    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
+        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
+        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
+        'i0coinaddress' in (yield bitcoind.rpc_help()) and
+        not (yield bitcoind.rpc_getinfo())['testnet']
+    )))
+    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 48*100000000 >> (height + 1)//218750)
+    BITCOIN_POW_FUNC = data.block_header_type.hash256
+    BITCOIN_SYMBOL = 'I0C'
+
+class I0coinTestnet(object):
+    BITCOIN_P2P_PREFIX = 'f5b6b7d8'.decode('hex')
+    BITCOIN_P2P_PORT = 17333
+    BITCOIN_ADDRESS_VERSION = 112
+    BITCOIN_RPC_PORT = 7332
+    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
+        'name_firstupdate' not in (yield bitcoind.rpc_help()) and
+        'ixcoinaddress' not in (yield bitcoind.rpc_help()) and
+        'i0coinaddress' in (yield bitcoind.rpc_help()) and
+        (yield bitcoind.rpc_getinfo())['testnet']
+    )))
+    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 48*100000000 >> (height + 1)//218750)
+    BITCOIN_POW_FUNC = data.block_header_type.hash256
+    BITCOIN_SYMBOL = 'tI0C'
+
+
+class SolidcoinMainnet(object):
+    BITCOIN_P2P_PREFIX = 'deadbabe'.decode('hex')
+    BITCOIN_P2P_PORT = 7555
+    BITCOIN_ADDRESS_VERSION = 125
+    BITCOIN_RPC_PORT = 8332
+    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
+        'solidcoinaddress' in (yield bitcoind.rpc_help()) and
+        not (yield bitcoind.rpc_getinfo())['testnet']
+    )))
+    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 32*100000000 >> (height + 1)//300000)
+    BITCOIN_POW_FUNC = data.block_header_type.hash256
+    BITCOIN_SYMBOL = 'SC'
+
+
+class LitecoinMainnet(object):
+    BITCOIN_P2P_PREFIX = 'fbc0b6db'.decode('hex')
+    BITCOIN_P2P_PORT = 9333
+    BITCOIN_ADDRESS_VERSION = 48
+    BITCOIN_RPC_PORT = 9332
+    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
+        'litecoinaddress' in (yield bitcoind.rpc_help()) and
+        not (yield bitcoind.rpc_getinfo())['testnet']
+    )))
+    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//840000)
+    BITCOIN_POW_FUNC = data.block_header_type.scrypt
+    BITCOIN_SYMBOL = 'LTC'
+
+class LitecoinTestnet(object):
+    BITCOIN_P2P_PREFIX = 'fcc1b7dc'.decode('hex')
+    BITCOIN_P2P_PORT = 19333
+    BITCOIN_ADDRESS_VERSION = 111
+    BITCOIN_RPC_PORT = 19332
+    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
+        'litecoinaddress' in (yield bitcoind.rpc_help()) and
+        (yield bitcoind.rpc_getinfo())['testnet']
+    )))
+    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 50*100000000 >> (height + 1)//840000)
+    BITCOIN_POW_FUNC = data.block_header_type.scrypt
+    BITCOIN_SYMBOL = 'tLTC'
diff --git a/p2pool/bitcoin/p2p.py b/p2pool/bitcoin/p2p.py
index a979899..a41caaa 100644
@@ -113,9 +113,6 @@ class Protocol(BaseProtocol):
     def use_checksum(self):
         return self.version >= 209
     
-    
-    null_order = '\0'*60
-    
     def connectionMade(self):
         BaseProtocol.connectionMade(self)
         
@@ -160,8 +157,6 @@ class Protocol(BaseProtocol):
         self.ready()
     
     def ready(self):
-        self.check_order = deferral.GenericDeferrer(2**256, lambda id, order: self.send_checkorder(id=id, order=order))
-        self.submit_order = deferral.GenericDeferrer(2**256, lambda id, order: self.send_submitorder(id=id, order=order))
         self.get_block = deferral.ReplyMatcher(lambda hash: self.send_getdata(requests=[dict(type='block', hash=hash)]))
         self.get_block_header = deferral.ReplyMatcher(lambda hash: self.send_getheaders(version=1, have=[], last=hash))
         self.get_tx = deferral.ReplyMatcher(lambda hash: self.send_getdata(requests=[dict(type='tx', hash=hash)]))
@@ -250,9 +245,6 @@ class Protocol(BaseProtocol):
         ('reply',  bitcoin_data.EnumType(bitcoin_data.StructType('<I'), {'success': 0, 'failure': 1, 'denied': 2})),
         ('script', bitcoin_data.PossiblyNoneType('', bitcoin_data.VarStrType())),
     ])
-    def handle_reply(self, hash, reply, script):
-        self.check_order.got_response(hash, dict(reply=reply, script=script))
-        self.submit_order.got_response(hash, dict(reply=reply, script=script))
     
     message_ping = bitcoin_data.ComposedType([])
     def handle_ping(self):
diff --git a/p2pool/bitcoin/solidcoin.py b/p2pool/bitcoin/solidcoin.py
deleted file mode 100644
index 763f3c0..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-from twisted.internet import defer
-
-class Mainnet(object):
-    BITCOIN_P2P_PREFIX = 'deadbabe'.decode('hex')
-    BITCOIN_P2P_PORT = 7555
-    BITCOIN_ADDRESS_VERSION = 125
-    BITCOIN_RPC_PORT = 8332
-    BITCOIN_RPC_CHECK = staticmethod(defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
-        'solidcoinaddress' in (yield bitcoind.rpc_help()) and
-        not (yield bitcoind.rpc_getinfo())['testnet']
-    )))
-    BITCOIN_SUBSIDY_FUNC = staticmethod(lambda height: 32*100000000 >> (height + 1)//300000)
-    BITCOIN_SYMBOL = 'SC'
diff --git a/p2pool/worker_interface.py b/p2pool/bitcoin/worker_interface.py
similarity index 66%
rename from p2pool/worker_interface.py
rename to p2pool/bitcoin/worker_interface.py
index c87db86..92dfe7d 100644
@@ -10,22 +10,7 @@ from twisted.python import log
 import p2pool
 from p2pool import data as p2pool_data
 from p2pool.util import jsonrpc, deferred_resource, variable
-from p2pool.bitcoin import data as bitcoin_data
-
-def get_username(request):
-    try:
-        return base64.b64decode(request.getHeader('Authorization').split(' ', 1)[1]).split(':')[0]
-    except: # XXX
-        return None
-
-def get_payout_script(request, net):
-    user = get_username(request)
-    if user is None:
-        return None
-    try:
-        return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
-    except: # XXX blah
-        return None
+from p2pool.bitcoin import getwork
 
 def get_memory(request):
     if request.getHeader('X-Miner-Extensions') is not None and 'workidentifier' in request.getHeader('X-Miner-Extensions').split(' '):
@@ -38,12 +23,32 @@ def get_memory(request):
     if 'cpuminer' in user_agent2: return 0
     if 'tenebrix miner' in user_agent2: return 0
     if 'ufasoft' in user_agent2: return 0 # not confirmed
-    if 'cgminer' in user_agent2: return 1
+    if 'cgminer' in user_agent2: return 0
     if 'poclbm' in user_agent2: return 1
     if 'phoenix' in user_agent2: return 2
     print 'Unknown miner User-Agent:', repr(user_agent)
     return 0
 
+def get_max_target(request): # inclusive
+    if request.getHeader('X-All-Targets') is not None or (request.getHeader('X-Miner-Extensions') is not None and 'alltargets' in request.getHeader('X-Miner-Extensions')):
+        return 2**256-1
+    user_agent = request.getHeader('User-Agent')
+    user_agent2 = '' if user_agent is None else user_agent.lower()
+    if 'java' in user_agent2 or 'diablominer' in user_agent2: return 2**256//2**32-1 # hopefully diablominer...
+    if 'cpuminer' in user_agent2: return 2**256//2**32-1
+    if 'tenebrix miner' in user_agent2: return 2**256-1
+    if 'cgminer' in user_agent2: return 2**256//2**32-1
+    if 'poclbm' in user_agent2: return 2**256//2**32-1
+    if 'phoenix' in user_agent2: return 2**256//2**32-1
+    print 'Unknown miner User-Agent:', repr(user_agent)
+    return 2**256//2**32-1
+
+def get_username(request):
+    try:
+        return base64.b64decode(request.getHeader('Authorization').split(' ', 1)[1]).split(':')[0]
+    except: # XXX
+        return None
+
 def get_id(request):
     return request.getClientIP(), request.getHeader('Authorization')
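
Both helpers above key off request headers only, so they can be exercised with a stub request. A sketch, assuming the p2pool package (and its Twisted dependencies) is importable; FakeRequest is a hypothetical stand-in for a twisted.web request:

    from p2pool.bitcoin.worker_interface import get_memory, get_max_target

    # Hypothetical stand-in for a twisted.web request; only getHeader() is used here.
    class FakeRequest(object):
        def __init__(self, headers):
            self.headers = headers
        def getHeader(self, name):
            return self.headers.get(name)

    req = FakeRequest({'User-Agent': 'cpuminer/1.0.2'})
    print(get_max_target(req) == 2**256//2**32 - 1)  # True: capped at the difficulty-1 share target
    print(get_memory(req) == 0)                      # True: cpuminer is assumed not to cache old work
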
 
@@ -100,15 +105,15 @@ class LongPollingWorkerInterface(deferred_resource.DeferredResource):
     render_POST = render_GET
 
 class WorkerInterface(jsonrpc.Server):
-    def __init__(self, work, compute, response_callback, net):
+    def __init__(self, compute, response_callback, new_work_event=variable.Event()):
         jsonrpc.Server.__init__(self)
         
-        self.work = work
         self.compute = compute
         self.response_callback = response_callback
-        self.net = net
+        self.new_work_event = new_work_event
+        
         self.holds = Holds()
-        self.last_cache_invalidation = {}
+        self.worker_views = {}
         
         self.putChild('long-polling', LongPollingWorkerInterface(self))
         self.putChild('', self)
@@ -119,10 +124,9 @@ class WorkerInterface(jsonrpc.Server):
         request.setHeader('X-Roll-NTime', 'expire=60')
         
         if data is not None:
-            defer.returnValue(self.response_callback(data, get_username(request)))
+            defer.returnValue(self.response_callback(getwork.decode_data(data), request))
         
         defer.returnValue((yield self.getwork(request)))
-    rpc_getwork.takes_request = True
     
     @defer.inlineCallbacks
     def getwork(self, request, long_poll=False):
@@ -133,33 +137,31 @@ class WorkerInterface(jsonrpc.Server):
         if p2pool.DEBUG:
             print 'POLL %i START long_poll=%r user_agent=%r x-work-identifier=%r user=%r' % (id, long_poll, request.getHeader('User-Agent'), request.getHeader('X-Work-Identifier'), get_username(request))
         
-        if request_id not in self.last_cache_invalidation:
-            self.last_cache_invalidation[request_id] = variable.Variable((None, None))
+        if request_id not in self.worker_views:
+            self.worker_views[request_id] = variable.Variable((0, (None, None))) # times, (previous_block/-1, previous_block/-2)
         
-        yield self.holds.wait_hold(request_id)
-        work = self.work.value
-        thought_work = self.last_cache_invalidation[request_id].value
+        thought_times, thought_work = self.worker_views[request_id].value
         
-        if long_poll and work == thought_work[-1]:
+        if long_poll and thought_times == self.new_work_event.times:
             if p2pool.DEBUG:
                 print 'POLL %i WAITING user=%r' % (id, get_username(request))
-            yield defer.DeferredList([self.work.changed.get_deferred(), self.last_cache_invalidation[request_id].changed.get_deferred()], fireOnOneCallback=True)
-        work = self.work.value
+            yield defer.DeferredList([self.new_work_event.get_deferred(), self.worker_views[request_id].changed.get_deferred()], fireOnOneCallback=True)
+        
+        yield self.holds.wait_hold(request_id)
         
-        if thought_work[-1] is not None and work != thought_work[-1] and any(x is None or work['previous_block'] == x['previous_block'] for x in thought_work[-memory or len(thought_work):]):
+        res, identifier = self.compute(request)
+        
+        if thought_work[-1] is not None and self.new_work_event.times != thought_times and any(x is None or res.previous_block == x for x in thought_work[-memory or len(thought_work):]):
             # clients won't believe the update
-            work = work.copy()
-            work['previous_block'] = random.randrange(2**256)
+            res = res.update(previous_block=random.randrange(2**256))
             if p2pool.DEBUG:
                 print 'POLL %i FAKED user=%r' % (id, get_username(request))
             self.holds.set_hold(request_id, .01)
-        res = self.compute(work, get_payout_script(request, self.net))
         
-        self.last_cache_invalidation[request_id].set((thought_work[-1], work))
+        self.worker_views[request_id].set((self.new_work_event.times if long_poll else thought_times, (thought_work[-1], res.previous_block)))
         if p2pool.DEBUG:
-            print 'POLL %i END %s user=%r' % (id, p2pool_data.format_hash(work['best_share_hash']), get_username(request))
+            print 'POLL %i END %s user=%r' % (id, p2pool_data.format_hash(identifier), get_username(request)) # XXX identifier is a hack
         
-        if request.getHeader('X-All-Targets') is None and res.share_target > self.net.MAX_TARGET:
-            res = res.update(share_target=self.net.MAX_TARGET)
-
-        defer.returnValue(res.getwork(identifier=str(work['best_share_hash'])))
+        res = res.update(share_target=min(res.share_target, get_max_target(request)))
+        
+        defer.returnValue(res.getwork(identifier=str(identifier)))
diff --git a/p2pool/data.py b/p2pool/data.py
index a3f2d9a..a018556 100644 (file)
 from __future__ import division
 
+TRANSITION_TIME = 1322804365
+
 import itertools
 import random
 import time
 import os
+import struct
 
 from twisted.python import log
 
 import p2pool
 from p2pool import skiplists
-from p2pool.bitcoin import data as bitcoin_data, script, namecoin, ixcoin, i0coin, solidcoin, litecoin
+from p2pool.bitcoin import data as bitcoin_data, script, networks
 from p2pool.util import memoize, expiring_dict, math
 
 
-merkle_branch_type = bitcoin_data.ListType(bitcoin_data.ComposedType([
-    ('side', bitcoin_data.StructType('<B')), # enum?
-    ('hash', bitcoin_data.HashType()),
-]))
-
-
-share_data_type = bitcoin_data.ComposedType([
+new_share_data_type = bitcoin_data.ComposedType([
     ('previous_share_hash', bitcoin_data.PossiblyNoneType(0, bitcoin_data.HashType())),
-    ('target', bitcoin_data.FloatingIntegerType()),
+    ('coinbase', bitcoin_data.VarStrType()),
     ('nonce', bitcoin_data.VarStrType()),
-])
-
-
-coinbase_type = bitcoin_data.ComposedType([
-    ('identifier', bitcoin_data.FixedStrType(8)),
-    ('share_data', share_data_type),
-])
-
-share_info_type = bitcoin_data.ComposedType([
-    ('share_data', share_data_type),
     ('new_script', bitcoin_data.VarStrType()),
     ('subsidy', bitcoin_data.StructType('<Q')),
+    ('donation', bitcoin_data.StructType('<H')),
+    ('stale_frac', bitcoin_data.StructType('<B')),
 ])
 
+new_share_info_type = bitcoin_data.ComposedType([
+    ('new_share_data', new_share_data_type),
+    ('target', bitcoin_data.FloatingIntegerType()),
+    ('timestamp', bitcoin_data.StructType('<I')),
+])
 
-share1a_type = bitcoin_data.ComposedType([
+new_share1a_type = bitcoin_data.ComposedType([
     ('header', bitcoin_data.block_header_type),
-    ('share_info', share_info_type),
-    ('merkle_branch', merkle_branch_type),
+    ('share_info', new_share_info_type),
+    ('merkle_branch', bitcoin_data.merkle_branch_type),
 ])
 
-share1b_type = bitcoin_data.ComposedType([
+new_share1b_type = bitcoin_data.ComposedType([
     ('header', bitcoin_data.block_header_type),
-    ('share_info', share_info_type),
+    ('share_info', new_share_info_type),
     ('other_txs', bitcoin_data.ListType(bitcoin_data.tx_type)),
 ])
 
-def calculate_merkle_branch(txs, index):
-    hash_list = [(bitcoin_data.tx_type.hash256(tx), i == index, []) for i, tx in enumerate(txs)]
-    
-    while len(hash_list) > 1:
-        hash_list = [
-            (
-                bitcoin_data.merkle_record_type.hash256(dict(left=left, right=right)),
-                left_f or right_f,
-                (left_l if left_f else right_l) + [dict(side=1, hash=right) if left_f else dict(side=0, hash=left)],
-            )
-            for (left, left_f, left_l), (right, right_f, right_l) in
-                zip(hash_list[::2], hash_list[1::2] + [hash_list[::2][-1]])
-        ]
-    
-    assert hash_list[0][1]
-    assert check_merkle_branch(txs[index], hash_list[0][2]) == hash_list[0][0]
-    
-    return hash_list[0][2]
+# type:
+# 0: new_share1a
+# 1: new_share1b
 
-def check_merkle_branch(tx, branch):
-    hash_ = bitcoin_data.tx_type.hash256(tx)
-    for step in branch:
-        if not step['side']:
-            hash_ = bitcoin_data.merkle_record_type.hash256(dict(left=step['hash'], right=hash_))
-        else:
-            hash_ = bitcoin_data.merkle_record_type.hash256(dict(left=hash_, right=step['hash']))
-    return hash_
-
-def gentx_to_share_info(gentx):
-    return dict(
-        share_data=coinbase_type.unpack(gentx['tx_ins'][0]['script'])['share_data'],
-        subsidy=sum(tx_out['value'] for tx_out in gentx['tx_outs']),
-        new_script=gentx['tx_outs'][-1]['script'],
-    )
-
-def share_info_to_gentx(share_info, block_target, tracker, net):
-    return generate_transaction(
-        tracker=tracker,
-        previous_share_hash=share_info['share_data']['previous_share_hash'],
-        new_script=share_info['new_script'],
-        subsidy=share_info['subsidy'],
-        nonce=share_info['share_data']['nonce'],
-        block_target=block_target,
-        net=net,
-    )
+new_share_type = bitcoin_data.ComposedType([
+    ('type', bitcoin_data.VarIntType()),
+    ('contents', bitcoin_data.VarStrType()),
+])
 
-class Share(object):
-    @classmethod
-    def from_block(cls, block, net):
-        return cls(net, block['header'], gentx_to_share_info(block['txs'][0]), other_txs=block['txs'][1:])
-    
+class NewShare(object):
     @classmethod
     def from_share1a(cls, share1a, net):
         return cls(net, **share1a)
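
new_share_type above frames every share, on the wire and in the share store, as a small type tag plus an opaque packed payload, so additional share formats can be introduced later without changing the framing. A stand-in sketch of that framing, simplified to the single-byte var_int case (the real code uses bitcoin_data.VarIntType/VarStrType):

    import struct

    # Simplified stand-in for the VarIntType/VarStrType framing: only values and
    # payload lengths below 0xfd (the single-byte var_int case) are handled.
    def pack_share_frame(type_id, contents):
        assert type_id < 0xfd and len(contents) < 0xfd
        return struct.pack('<B', type_id) + struct.pack('<B', len(contents)) + contents

    frame = pack_share_frame(0, b'packed new_share1a bytes would go here')
    print(repr(frame[:2]))  # type byte 0x00 followed by the payload length 0x26 (38)
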
@@ -109,13 +62,48 @@ class Share(object):
     def from_share1b(cls, share1b, net):
         return cls(net, **share1b)
     
-    __slots__ = 'header previous_block share_info merkle_branch other_txs timestamp share_data new_script subsidy previous_hash previous_share_hash target nonce bitcoin_hash hash time_seen shared stored peer'.split(' ')
+    def as_share1a(self):
+        return dict(header=self.header, share_info=self.share_info, merkle_branch=self.merkle_branch)
+    
+    def as_share1b(self):
+        if self.other_txs is None:
+            raise ValueError('share does not contain all txs')
+        
+        return dict(header=self.header, share_info=self.share_info, other_txs=self.other_txs)
+    
+    def __repr__(self):
+        return '<Share %s>' % (' '.join('%s=%r' % (k, getattr(self, k)) for k in self.__slots__),)
+    
+    __slots__ = 'header previous_block share_info merkle_branch other_txs timestamp share_data new_script subsidy previous_hash previous_share_hash target nonce pow_hash header_hash hash time_seen peer donation stale_frac'.split(' ')
+    
+    @classmethod
+    def from_share(cls, share, net):
+        if share['type'] == 0:
+            res = cls.from_share1a(new_share1a_type.unpack(share['contents']), net)
+            if not (res.pow_hash > res.header['target']):
+                raise ValueError('invalid share type')
+            return res
+        elif share['type'] == 1:
+            res = cls.from_share1b(new_share1b_type.unpack(share['contents']), net)
+            if not (res.pow_hash <= res.header['target']):
+                raise ValueError('invalid share type')
+            return res
+        else:
+            raise ValueError('unknown share type: %r' % (share['type'],))
+    
+    def as_share(self):
+        if self.pow_hash > self.header['target']: # new_share1a
+            return dict(type=0, contents=new_share1a_type.pack(self.as_share1a()))
+        elif self.pow_hash <= self.header['target']: # new_share1b
+            return dict(type=1, contents=new_share1b_type.pack(self.as_share1b()))
+        else:
+            raise AssertionError()
     
     def __init__(self, net, header, share_info, merkle_branch=None, other_txs=None):
         if merkle_branch is None and other_txs is None:
             raise ValueError('need either merkle_branch or other_txs')
         if other_txs is not None:
-            new_merkle_branch = calculate_merkle_branch([dict(version=0, tx_ins=[], tx_outs=[], lock_time=0)] + other_txs, 0)
+            new_merkle_branch = bitcoin_data.calculate_merkle_branch([dict(version=0, tx_ins=[], tx_outs=[], lock_time=0)] + other_txs, 0)
             if merkle_branch is not None:
                 if merkle_branch != new_merkle_branch:
                     raise ValueError('invalid merkle_branch and other_txs')
@@ -130,87 +118,64 @@ class Share(object):
         self.merkle_branch = merkle_branch
         self.other_txs = other_txs
         
-        self.timestamp = self.header['timestamp']
+        self.share_data = self.share_info['new_share_data']
+        self.target = self.share_info['target']
+        self.timestamp = self.share_info['timestamp']
         
-        self.share_data = self.share_info['share_data']
-        self.new_script = self.share_info['new_script']
-        self.subsidy = self.share_info['subsidy']
+        self.new_script = self.share_data['new_script']
+        self.subsidy = self.share_data['subsidy']
+        self.donation = self.share_data['donation']
         
         if len(self.new_script) > 100:
             raise ValueError('new_script too long!')
         
         self.previous_hash = self.previous_share_hash = self.share_data['previous_share_hash']
-        self.target = self.share_data['target']
         self.nonce = self.share_data['nonce']
         
         if len(self.nonce) > 100:
             raise ValueError('nonce too long!')
-
-        # use scrypt for Litecoin
-        if (getattr(net, 'BITCOIN_POW_SCRYPT', False)):
-            self.bitcoin_hash = bitcoin_data.block_header_type.scrypt(header)
-            self.hash = share1a_type.scrypt(self.as_share1a())
-        else:
-            self.bitcoin_hash = bitcoin_data.block_header_type.hash256(header)
-            self.hash = share1a_type.hash256(self.as_share1a())
-
-        if self.bitcoin_hash > self.target:
-            print 'hash %x' % self.bitcoin_hash
+        
+        if len(self.share_data['coinbase']) > 100:
+            raise ValueError('''coinbase too large! %i bytes''' % (len(self.share_data['coinbase']),))
+        
+        self.pow_hash = net.BITCOIN_POW_FUNC(header)
+        self.header_hash = bitcoin_data.block_header_type.hash256(header)
+        
+        self.hash = new_share1a_type.hash256(self.as_share1a())
+        
+        if self.pow_hash > self.target:
+            print 'hash %x' % self.pow_hash
             print 'targ %x' % self.target
             raise ValueError('not enough work!')
         
         if script.get_sigop_count(self.new_script) > 1:
             raise ValueError('too many sigops!')
         
+        self.stale_frac = self.share_data['stale_frac']/254 if self.share_data['stale_frac'] != 255 else None
+        
         # XXX eww
         self.time_seen = time.time()
-        self.shared = False
-        self.stored = False
         self.peer = None
     
+    def check(self, tracker, now, net):
+        if self.header['timestamp'] <= TRANSITION_TIME - 3600:
+            raise AssertionError('transitioning...')
+        
+        share_info, gentx = new_generate_transaction(tracker, self.share_info['new_share_data'], self.header['target'], self.share_info['timestamp'], net)
+        if share_info != self.share_info:
+            raise ValueError('share difficulty invalid')
+        
+        if bitcoin_data.check_merkle_branch(gentx, 0, self.merkle_branch) != self.header['merkle_root']:
+            raise ValueError('''gentx doesn't match header via merkle_branch''')
+    
     def as_block(self, tracker, net):
         if self.other_txs is None:
             raise ValueError('share does not contain all txs')
         
-        gentx = share_info_to_gentx(self.share_info, self.header['target'], tracker, net)
+        share_info, gentx = new_generate_transaction(tracker, self.share_info['new_share_data'], self.header['target'], self.share_info['timestamp'], net)
+        assert share_info == self.share_info
         
         return dict(header=self.header, txs=[gentx] + self.other_txs)
-    
-    def as_share1a(self):
-        return dict(header=self.header, share_info=self.share_info, merkle_branch=self.merkle_branch)
-    
-    def as_share1b(self):
-        return dict(header=self.header, share_info=self.share_info, other_txs=self.other_txs)
-    
-    def check(self, tracker, now, net):
-        import time
-        if self.previous_share_hash is not None:
-            if self.header['timestamp'] <= math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(self.previous_share_hash), 11)), use_float=False):
-                raise ValueError('share from too far in the past!')
-        
-        if self.header['timestamp'] > now + 2*60*60:
-            raise ValueError('share from too far in the future!')
-        
-        gentx = share_info_to_gentx(self.share_info, self.header['target'], tracker, net)
-        
-        if len(gentx['tx_ins'][0]['script']) > 100:
-            raise ValueError('''coinbase too large! %i bytes''' % (len(gentx['tx_ins'][0]['script']),))
-        
-        if check_merkle_branch(gentx, self.merkle_branch) != self.header['merkle_root']:
-            raise ValueError('''gentx doesn't match header via merkle_branch''')
-        
-        if self.other_txs is not None:
-            if bitcoin_data.merkle_hash([gentx] + self.other_txs) != self.header['merkle_root']:
-                raise ValueError('''gentx doesn't match header via other_txs''')
-            
-            if len(bitcoin_data.block_type.pack(dict(header=self.header, txs=[gentx] + self.other_txs))) > 1000000 - 1000:
-                raise ValueError('''block size too large''')
-    
-    def flag_shared(self):
-        self.shared = True
-    
-    def __repr__(self):
-        return '<Share %s>' % (' '.join('%s=%r' % (k, getattr(self, k)) for k in self.__slots__),)
 
 def get_pool_attempts_per_second(tracker, previous_share_hash, net, dist=None):
     if dist is None:
@@ -223,68 +188,72 @@ def get_pool_attempts_per_second(tracker, previous_share_hash, net, dist=None):
         time = 1
     return attempts//time
 
-def generate_transaction(tracker, previous_share_hash, new_script, subsidy, nonce, block_target, net):
+def new_generate_transaction(tracker, new_share_data, block_target, desired_timestamp, net):
+    previous_share_hash = new_share_data['previous_share_hash']
+    new_script = new_share_data['new_script']
+    subsidy = new_share_data['subsidy']
+    donation = new_share_data['donation']
+    assert 0 <= donation <= 65535
+    
+    if len(new_share_data['coinbase']) > 100:
+        raise ValueError('coinbase too long!')
+    
+    previous_share = tracker.shares[previous_share_hash] if previous_share_hash is not None else None
+    
     height, last = tracker.get_height_and_last(previous_share_hash)
     assert height >= net.CHAIN_LENGTH or last is None
     if height < net.TARGET_LOOKBEHIND:
         target = bitcoin_data.FloatingInteger.from_target_upper_bound(net.MAX_TARGET)
     else:
         attempts_per_second = get_pool_attempts_per_second(tracker, previous_share_hash, net)
-        previous_share = tracker.shares[previous_share_hash] if previous_share_hash is not None else None
         pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1
         pre_target2 = math.clip(pre_target, (previous_share.target*9//10, previous_share.target*11//10))
         pre_target3 = math.clip(pre_target2, (0, net.MAX_TARGET))
         target = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
     
     attempts_to_block = bitcoin_data.target_to_average_attempts(block_target)
-    max_weight = net.SPREAD * attempts_to_block
+    max_att = net.SPREAD * attempts_to_block
+    
+    this_att = min(bitcoin_data.target_to_average_attempts(target), max_att)
+    other_weights, other_total_weight, other_donation_weight = tracker.get_cumulative_weights(previous_share_hash, min(height, net.CHAIN_LENGTH), 65535*max(0, max_att - this_att))
+    assert other_total_weight == sum(other_weights.itervalues()) + other_donation_weight, (other_total_weight, sum(other_weights.itervalues()) + other_donation_weight)
+    weights, total_weight, donation_weight = math.add_dicts([{new_script: this_att*(65535-donation)}, other_weights]), this_att*65535 + other_total_weight, this_att*donation + other_donation_weight
+    assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
+    
+    SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
+    
+    # 1 satoshi is always donated so that a list of p2pool generated blocks can be easily found by looking at the donation address
+    amounts = dict((script, (subsidy-1)*(199*weight)//(200*total_weight)) for (script, weight) in weights.iteritems())
+    amounts[new_script] = amounts.get(new_script, 0) + (subsidy-1)//200
+    amounts[SCRIPT] = amounts.get(SCRIPT, 0) + (subsidy-1)*(199*donation_weight)//(200*total_weight)
+    amounts[SCRIPT] = amounts.get(SCRIPT, 0) + subsidy - sum(amounts.itervalues()) # collect any extra satoshis :P
     
-    this_weight = min(bitcoin_data.target_to_average_attempts(target), max_weight)
-    other_weights, other_weights_total = tracker.get_cumulative_weights(previous_share_hash, min(height, net.CHAIN_LENGTH), max(0, max_weight - this_weight))
-    dest_weights, total_weight = math.add_dicts([{new_script: this_weight}, other_weights]), this_weight + other_weights_total
-    assert total_weight == sum(dest_weights.itervalues())
-
-    if net.SCRIPT:
-        amounts = dict((script, subsidy*(396*weight)//(400*total_weight)) for (script, weight) in dest_weights.iteritems())
-        amounts[new_script] = amounts.get(new_script, 0) + subsidy*2//400
-        amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy*2//400
-        amounts[net.SCRIPT] = amounts.get(net.SCRIPT, 0) + subsidy - sum(amounts.itervalues()) # collect any extra
-    else:
-        amounts = dict((script, subsidy*(398*weight)//(400*total_weight)) for (script, weight) in dest_weights.iteritems())
-        amounts[new_script] = amounts.get(new_script, 0) + subsidy*2//400
-        amounts[new_script] = amounts.get(new_script, 0) + subsidy - sum(amounts.itervalues()) # collect any extra
-
     if sum(amounts.itervalues()) != subsidy:
         raise ValueError()
     if any(x < 0 for x in amounts.itervalues()):
         raise ValueError()
     
-    pre_dests = sorted(amounts.iterkeys(), key=lambda script: (amounts[script], script))
-    pre_dests = pre_dests[-4000:] # block length limit, unlikely to ever be hit
+    dests = sorted(amounts.iterkeys(), key=lambda script: (amounts[script], script))
+    dests = dests[-4000:] # block length limit, unlikely to ever be hit
     
-    dests = sorted(pre_dests, key=lambda script: (script == new_script, script))
-    assert dests[-1] == new_script
+    share_info = dict(
+        new_share_data=new_share_data,
+        target=target,
+        timestamp=math.clip(desired_timestamp, (previous_share.timestamp - 60, previous_share.timestamp + 60)) if previous_share is not None else desired_timestamp,
+    )
     
-    return dict(
+    return share_info, dict(
         version=1,
         tx_ins=[dict(
             previous_output=None,
             sequence=None,
-            script=coinbase_type.pack(dict(
-                identifier=net.IDENTIFIER,
-                share_data=dict(
-                    previous_share_hash=previous_share_hash,
-                    nonce=nonce,
-                    target=target,
-                ),
-            )),
+            script=new_share_data['coinbase'].ljust(2, '\x00'),
         )],
-        tx_outs=[dict(value=amounts[script], script=script) for script in dests if amounts[script]],
+        tx_outs=[dict(value=0, script='\x20' + bitcoin_data.HashType().pack(new_share_info_type.hash256(share_info)))] + [dict(value=amounts[script], script=script) for script in dests if amounts[script]],
         lock_time=0,
     )
 
 
-
 class OkayTracker(bitcoin_data.Tracker):
     def __init__(self, net):
         bitcoin_data.Tracker.__init__(self)
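
The payout logic in new_generate_transaction above splits the block reward as follows: 0.5% of (subsidy - 1) goes to the miner of this share, the remaining 99.5% is divided proportionally among the weighted scripts plus the donation weight, and rounding dust together with the one marker satoshi is collected by the hard-coded donation script. A self-contained sketch of that integer arithmetic with made-up weights (script names and numbers are illustrative only):

    # Toy weights, not real attempt counts; 'A' stands for the miner of this share.
    subsidy = 5000000000                  # 50 BTC in satoshis
    weights = {'A': 60, 'B': 30}          # per-script weights (A's already includes this share)
    donation_weight = 10
    total_weight = sum(weights.values()) + donation_weight

    amounts = dict((s, (subsidy-1)*(199*w)//(200*total_weight)) for s, w in weights.items())
    amounts['A'] = amounts.get('A', 0) + (subsidy-1)//200       # 0.5% bonus to this share's miner
    amounts['DONATION'] = (subsidy-1)*(199*donation_weight)//(200*total_weight)
    amounts['DONATION'] += subsidy - sum(amounts.values())      # rounding dust + the 1 marker satoshi

    assert sum(amounts.values()) == subsidy
    print(amounts)
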
@@ -368,7 +337,7 @@ class OkayTracker(bitcoin_data.Tracker):
         
         
         if p2pool.DEBUG:
-            print len(self.verified.tails), "chain tails and", len(self.verified.tails.get(best_tail, [])), 'chain heads. Top 10 tails:'
+            print len(self.verified.tails), "chain tails and", len(self.verified.tails.get(best_tail, [])), 'chain heads. Top 10 heads:'
             if len(scores) > 10:
                 print '    ...'
             for h in scores[-10:]:
@@ -431,7 +400,7 @@ class OkayTracker(bitcoin_data.Tracker):
         
         if best is not None:
             best_share = self.verified.shares[best]
-            if ht.get_min_height(best_share.header['previous_block']) < ht.get_min_height(previous_block) and best_share.bitcoin_hash != previous_block and best_share.peer is not None:
+            if ht.get_min_height(best_share.header['previous_block']) < ht.get_min_height(previous_block) and best_share.header_hash != previous_block and best_share.peer is not None:
                 if p2pool.DEBUG:
                     print 'Stale detected! %x < %x' % (best_share.header['previous_block'], previous_block)
                 best = best_share.previous_hash
@@ -461,6 +430,8 @@ def format_hash(x):
 class ShareStore(object):
     def __init__(self, prefix, net):
         self.filename = prefix
+        self.dirname = os.path.dirname(os.path.abspath(prefix))
+        self.filename = os.path.basename(os.path.abspath(prefix))
         self.net = net
         self.known = None # will be filename -> set of share hashes, set of verified hashes
     
@@ -477,17 +448,17 @@ class ShareStore(object):
                         type_id_str, data_hex = line.strip().split(' ')
                         type_id = int(type_id_str)
                         if type_id == 0:
-                            share = Share.from_share1a(share1a_type.unpack(data_hex.decode('hex')), self.net)
-                            yield 'share', share
-                            share_hashes.add(share.hash)
+                            pass
                         elif type_id == 1:
-                            share = Share.from_share1b(share1b_type.unpack(data_hex.decode('hex')), self.net)
-                            yield 'share', share
-                            share_hashes.add(share.hash)
+                            pass
                         elif type_id == 2:
                             verified_hash = int(data_hex, 16)
                             yield 'verified_hash', verified_hash
                             verified_hashes.add(verified_hash)
+                        elif type_id == 5:
+                            share = NewShare.from_share(new_share_type.unpack(data_hex.decode('hex')), self.net)
+                            yield 'share', share
+                            share_hashes.add(share.hash)
                         else:
                             raise NotImplementedError("share type %i" % (type_id,))
                     except Exception:
@@ -507,10 +478,7 @@ class ShareStore(object):
         return filename
     
     def add_share(self, share):
-        if share.bitcoin_hash <= share.header['target']:
-            type_id, data = 1, share1b_type.pack(share.as_share1b())
-        else:
-            type_id, data = 0, share1a_type.pack(share.as_share1a())
+        type_id, data = 5, new_share_type.pack(share.as_share())
         filename = self._add_line("%i %s" % (type_id, data.encode('hex')))
         share_hashes, verified_hashes = self.known.setdefault(filename, (set(), set()))
         share_hashes.add(share.hash)
@@ -521,8 +489,8 @@ class ShareStore(object):
         verified_hashes.add(share_hash)
     
     def get_filenames_and_next(self):
-        suffixes = sorted(int(x[len(self.filename):]) for x in os.listdir('.') if x.startswith(self.filename) and x[len(self.filename):].isdigit())
-        return [self.filename + str(suffix) for suffix in suffixes], self.filename + str(suffixes[-1] + 1) if suffixes else self.filename + str(0)
+        suffixes = sorted(int(x[len(self.filename):]) for x in os.listdir(self.dirname) if x.startswith(self.filename) and x[len(self.filename):].isdigit())
+        return [os.path.join(self.dirname, self.filename + str(suffix)) for suffix in suffixes], os.path.join(self.dirname, self.filename + (str(suffixes[-1] + 1) if suffixes else str(0)))
     
     def forget_share(self, share_hash):
         for filename, (share_hashes, verified_hashes) in self.known.iteritems():
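
get_filenames_and_next above (together with the dirname/basename split added to __init__) makes the store list files named <prefix><N> in its own directory rather than in the current working directory. A standalone sketch of the naming scheme, with the directory listing passed in explicitly instead of calling os.listdir:

    import os

    def filenames_and_next(dirname, basename, existing):
        # 'existing' stands in for os.listdir(dirname)
        suffixes = sorted(int(x[len(basename):]) for x in existing
                          if x.startswith(basename) and x[len(basename):].isdigit())
        files = [os.path.join(dirname, basename + str(s)) for s in suffixes]
        next_name = os.path.join(dirname, basename + (str(suffixes[-1] + 1) if suffixes else str(0)))
        return files, next_name

    print(filenames_and_next('/tmp', 'bitcoin_shares.', ['bitcoin_shares.0', 'bitcoin_shares.3', 'other']))
    # (['/tmp/bitcoin_shares.0', '/tmp/bitcoin_shares.3'], '/tmp/bitcoin_shares.4')
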
@@ -547,12 +515,11 @@ class ShareStore(object):
             os.remove(filename)
             print "REMOVED", filename
 
-class BitcoinMainnet(bitcoin_data.Mainnet):
+class BitcoinMainnet(networks.BitcoinMainnet):
     SHARE_PERIOD = 10 # seconds
-    CHAIN_LENGTH = 24*60*60//5 # shares
+    CHAIN_LENGTH = 24*60*60//10//2 # shares
     TARGET_LOOKBEHIND = 200 # shares
     SPREAD = 3 # blocks
-    SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
     IDENTIFIER = 'fc70035c7a81bc6f'.decode('hex')
     PREFIX = '2472ef181efcd37b'.decode('hex')
     NAME = 'bitcoin'
@@ -561,12 +528,11 @@ class BitcoinMainnet(bitcoin_data.Mainnet):
     PERSIST = True
     WORKER_PORT = 9332
 
-class BitcoinTestnet(bitcoin_data.Testnet):
+class BitcoinTestnet(networks.BitcoinTestnet):
     SHARE_PERIOD = 1 # seconds
-    CHAIN_LENGTH = 24*60*60//5 # shares
+    CHAIN_LENGTH = 24*60*60//10//2 # shares
     TARGET_LOOKBEHIND = 200 # shares
     SPREAD = 3 # blocks
-    SCRIPT = '410403ad3dee8ab3d8a9ce5dd2abfbe7364ccd9413df1d279bf1a207849310465b0956e5904b1155ecd17574778f9949589ebfd4fb33ce837c241474a225cf08d85dac'.decode('hex')
     IDENTIFIER = '5fc2be2d4f0d6bfb'.decode('hex')
     PREFIX = '3f6057a15036f441'.decode('hex')
     NAME = 'bitcoin_testnet'
@@ -575,12 +541,11 @@ class BitcoinTestnet(bitcoin_data.Testnet):
     PERSIST = False
     WORKER_PORT = 19332
 
-class NamecoinMainnet(namecoin.Mainnet):
+class NamecoinMainnet(networks.NamecoinMainnet):
     SHARE_PERIOD = 10 # seconds
     CHAIN_LENGTH = 24*60*60//10 # shares
     TARGET_LOOKBEHIND = 3600//10 # shares
     SPREAD = 3 # blocks
-    SCRIPT = '41043da5beb73f8f18cede1a41b0ed953123f1342b8e0216ab5bf71ed3e024201b4017f472bddb6041f17978d89ed8f8ed84f9e726b0bca80cacf96347c7153e8df0ac'.decode('hex')
     IDENTIFIER = 'd5b1192062c4c454'.decode('hex')
     PREFIX = 'b56f3d0fb24fc982'.decode('hex')
     NAME = 'namecoin'
@@ -589,12 +554,11 @@ class NamecoinMainnet(namecoin.Mainnet):
     PERSIST = True
     WORKER_PORT = 9331
 
-class NamecoinTestnet(namecoin.Testnet):
+class NamecoinTestnet(networks.NamecoinTestnet):
     SHARE_PERIOD = 1 # seconds
     CHAIN_LENGTH = 24*60*60//5 # shares
     TARGET_LOOKBEHIND = 200 # shares
     SPREAD = 3 # blocks
-    SCRIPT = '410403ad3dee8ab3d8a9ce5dd2abfbe7364ccd9413df1d279bf1a207849310465b0956e5904b1155ecd17574778f9949589ebfd4fb33ce837c241474a225cf08d85dac'.decode('hex')
     IDENTIFIER = '8dd303d014a01a60'.decode('hex')
     PREFIX = '4d6581d24f51acbf'.decode('hex')
     NAME = 'namecoin_testnet'
@@ -603,12 +567,11 @@ class NamecoinTestnet(namecoin.Testnet):
     PERSIST = False
     WORKER_PORT = 19331
 
-class IxcoinMainnet(ixcoin.Mainnet):
+class IxcoinMainnet(networks.IxcoinMainnet):
     SHARE_PERIOD = 10 # seconds
     CHAIN_LENGTH = 24*60*60//10 # shares
     TARGET_LOOKBEHIND = 3600//10 # shares
     SPREAD = 3 # blocks
-    SCRIPT = '41043da5beb73f8f18cede1a41b0ed953123f1342b8e0216ab5bf71ed3e024201b4017f472bddb6041f17978d89ed8f8ed84f9e726b0bca80cacf96347c7153e8df0ac'.decode('hex')
     IDENTIFIER = '27b564116e2a2666'.decode('hex')
     PREFIX = '9dd6c4a619401f2f'.decode('hex')
     NAME = 'ixcoin'
@@ -617,12 +580,11 @@ class IxcoinMainnet(ixcoin.Mainnet):
     PERSIST = True
     WORKER_PORT = 9330
 
-class IxcoinTestnet(ixcoin.Testnet):
+class IxcoinTestnet(networks.IxcoinTestnet):
     SHARE_PERIOD = 1 # seconds
     CHAIN_LENGTH = 24*60*60//5 # shares
     TARGET_LOOKBEHIND = 200 # shares
     SPREAD = 3 # blocks
-    SCRIPT = '410403ad3dee8ab3d8a9ce5dd2abfbe7364ccd9413df1d279bf1a207849310465b0956e5904b1155ecd17574778f9949589ebfd4fb33ce837c241474a225cf08d85dac'.decode('hex')
     IDENTIFIER = '7430cbeb01249e44'.decode('hex')
     PREFIX = '7cfffda946709c1f'.decode('hex')
     NAME = 'ixcoin_testnet'
@@ -631,12 +593,11 @@ class IxcoinTestnet(ixcoin.Testnet):
     PERSIST = False
     WORKER_PORT = 19330
 
-class I0coinMainnet(i0coin.Mainnet):
+class I0coinMainnet(networks.I0coinMainnet):
     SHARE_PERIOD = 10 # seconds
     CHAIN_LENGTH = 24*60*60//10 # shares
     TARGET_LOOKBEHIND = 3600//10 # shares
     SPREAD = 3 # blocks
-    SCRIPT = '410403ad3dee8ab3d8a9ce5dd2abfbe7364ccd9413df1d279bf1a207849310465b0956e5904b1155ecd17574778f9949589ebfd4fb33ce837c241474a225cf08d85dac'.decode('hex')
     IDENTIFIER = 'b32e3f10c2ff221b'.decode('hex')
     PREFIX = '6155537ed977a3b5'.decode('hex')
     NAME = 'i0coin'
@@ -645,12 +606,11 @@ class I0coinMainnet(i0coin.Mainnet):
     PERSIST = False
     WORKER_PORT = 9329
 
-class I0coinTestnet(i0coin.Testnet):
+class I0coinTestnet(networks.I0coinTestnet):
     SHARE_PERIOD = 1 # seconds
     CHAIN_LENGTH = 24*60*60//5 # shares
     TARGET_LOOKBEHIND = 200 # shares
     SPREAD = 3 # blocks
-    SCRIPT = '410403ad3dee8ab3d8a9ce5dd2abfbe7364ccd9413df1d279bf1a207849310465b0956e5904b1155ecd17574778f9949589ebfd4fb33ce837c241474a225cf08d85dac'.decode('hex')
     IDENTIFIER = '7712c1a8181b5f2e'.decode('hex')
     PREFIX = '792d2e7d770fbe68'.decode('hex')
     NAME = 'i0coin_testnet'
@@ -659,12 +619,11 @@ class I0coinTestnet(i0coin.Testnet):
     PERSIST = False
     WORKER_PORT = 19329
 
-class SolidcoinMainnet(solidcoin.Mainnet):
+class SolidcoinMainnet(networks.SolidcoinMainnet):
     SHARE_PERIOD = 10
     CHAIN_LENGTH = 24*60*60//10 # shares
     TARGET_LOOKBEHIND = 3600//10 # shares
     SPREAD = 3 # blocks
-    SCRIPT = bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash('sMKZ1yxHETxPYKh4Z2anWnwZDJZU7ztroy', solidcoin.Mainnet))
     IDENTIFIER = '9cc9c421cca258cd'.decode('hex')
     PREFIX = 'c059125b8070f00a'.decode('hex')
     NAME = 'solidcoin'
@@ -673,12 +632,11 @@ class SolidcoinMainnet(solidcoin.Mainnet):
     PERSIST = True
     WORKER_PORT = 9328
 
-class LitecoinMainnet(litecoin.Mainnet):
+class LitecoinMainnet(networks.LitecoinMainnet):
     SHARE_PERIOD = 10 # seconds
-    CHAIN_LENGTH = 24*60*60//5 # shares
+    CHAIN_LENGTH = 24*60*60//10//2 # shares
     TARGET_LOOKBEHIND = 200 # shares
     SPREAD = 12 # blocks
-    SCRIPT = None # no fee
     IDENTIFIER = 'e037d5b8c6923410'.decode('hex')
     PREFIX = '7208c1a53ef629b0'.decode('hex')
     NAME = 'litecoin'
@@ -687,12 +645,11 @@ class LitecoinMainnet(litecoin.Mainnet):
     PERSIST = True
     WORKER_PORT = 9327
 
-class LitecoinTestnet(litecoin.Testnet):
+class LitecoinTestnet(networks.LitecoinTestnet):
     SHARE_PERIOD = 1 # seconds
     CHAIN_LENGTH = 24*60*60//5 # shares
     TARGET_LOOKBEHIND = 200 # shares
     SPREAD = 12 # blocks
-    SCRIPT = None # no fee
     IDENTIFIER = 'cca5e24ec6408b1e'.decode('hex')
     PREFIX = 'ad9614f6466a39cf'.decode('hex')
     NAME = 'litecoin_testnet'
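
These per-network constants control how much share history a node keeps: CHAIN_LENGTH counts shares, so its product with SHARE_PERIOD is the wall-clock span covered. A quick check of the new 24*60*60//10//2 value used for the Bitcoin and Litecoin mainnets above:

    SHARE_PERIOD = 10                 # seconds per share (mainnet value above)
    CHAIN_LENGTH = 24*60*60//10//2    # shares

    print(CHAIN_LENGTH)                         # 4320 shares
    print(CHAIN_LENGTH*SHARE_PERIOD/3600.0)     # 12.0 -> the chain now spans about half a day
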
diff --git a/p2pool/main.py b/p2pool/main.py
index ac0fc42..9d86c98 100644 (file)
@@ -20,65 +20,41 @@ from twisted.web import server, resource
 from twisted.python import log
 from nattraverso import portmapper, ipdiscover
 
-import bitcoin.p2p, bitcoin.getwork, bitcoin.data
+import bitcoin.p2p as bitcoin_p2p, bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
+from bitcoin import worker_interface
 from util import db, expiring_dict, jsonrpc, variable, deferral, math
-from . import p2p, worker_interface, skiplists
-import p2pool.data as p2pool
-import p2pool as p2pool_init
+from . import p2p, skiplists
+import p2pool, p2pool.data as p2pool_data
 
 @deferral.retry('Error getting work from bitcoind:', 3)
 @defer.inlineCallbacks
-def getwork(bitcoind, ht, net):
-    try:
-        work = yield bitcoind.rpc_getmemorypool()
-        defer.returnValue(dict(
-            version=work['version'],
-            previous_block_hash=int(work['previousblockhash'], 16),
-            transactions=[bitcoin.data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
-            subsidy=work['coinbasevalue'],
-            time=work['time'],
-            target=bitcoin.data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin.data.FloatingInteger(work['bits']),
-        ))
-    except jsonrpc.Error, e:
-        if e.code != -32601:
-            raise
-        
-        print "---> Update your bitcoind to support the 'getmemorypool' RPC call. Not including transactions in generated blocks! <---"
-        work = bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork()))
-        try:
-            subsidy = net.BITCOIN_SUBSIDY_FUNC(ht.getHeight(work.previous_block))
-        except ValueError:
-            subsidy = net.BITCOIN_SUBSIDY_FUNC(1000)
-        
-        defer.returnValue(dict(
-            version=work.version,
-            previous_block_hash=work.previous_block,
-            transactions=[],
-            subsidy=subsidy,
-            time=work.timestamp,
-            target=work.block_target,
-        ))
-
-@deferral.retry('Error getting payout script from bitcoind:', 1)
-@defer.inlineCallbacks
-def get_payout_script(factory):
-    res = yield (yield factory.getProtocol()).check_order(order=bitcoin.p2p.Protocol.null_order)
-    if res['reply'] == 'success':
-        defer.returnValue(res['script'])
-    elif res['reply'] == 'denied':
-        defer.returnValue(None)
-    else:
-        raise ValueError('Unexpected reply: %r' % (res,))
+def getwork(bitcoind):
+    work = yield bitcoind.rpc_getmemorypool()
+    defer.returnValue(dict(
+        version=work['version'],
+        previous_block_hash=int(work['previousblockhash'], 16),
+        transactions=[bitcoin_data.tx_type.unpack(x.decode('hex')) for x in work['transactions']],
+        subsidy=work['coinbasevalue'],
+        time=work['time'],
+        target=bitcoin_data.FloatingIntegerType().unpack(work['bits'].decode('hex')[::-1]) if isinstance(work['bits'], (str, unicode)) else bitcoin_data.FloatingInteger(work['bits']),
+    ))
 
 @deferral.retry('Error creating payout script:', 10)
 @defer.inlineCallbacks
 def get_payout_script2(bitcoind, net):
-    defer.returnValue(bitcoin.data.pubkey_hash_to_script2(bitcoin.data.address_to_pubkey_hash((yield bitcoind.rpc_getaccountaddress('p2pool')), net)))
+    address = yield bitcoind.rpc_getaccountaddress('p2pool')
+    try:
+        pubkey = (yield bitcoind.rpc_validateaddress(address))['pubkey'].decode('hex')
+    except:
+        log.err()
+        defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
+    else:
+        defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))
 
 @defer.inlineCallbacks
-def main(args):
+def main(args, net):
     try:
-        print 'p2pool (version %s)' % (p2pool_init.__version__,)
+        print 'p2pool (version %s)' % (p2pool.__version__,)
         print
         try:
             from . import draw
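
getmemorypool reports the block target in bitcoin's compact 'bits' form; the [::-1] in the getwork wrapper above reverses the hex-decoded bytes because FloatingIntegerType, like the rest of the wire structures, reads little-endian. For reference, a sketch of the standard compact-target expansion (the helper name is ours; sign and overflow corner cases are ignored):

    # Bitcoin's compact "bits" encoding: first byte is the exponent, the remaining
    # three bytes the mantissa; target = mantissa * 256**(exponent - 3).
    def bits_to_target(bits_hex):
        exponent = int(bits_hex[:2], 16)
        mantissa = int(bits_hex[2:], 16)
        return mantissa * 256**(exponent - 3)

    print('%x' % bits_to_target('1d00ffff'))  # ffff followed by 52 zeros: the difficulty-1 block target
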
@@ -87,49 +63,52 @@ def main(args):
             print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
             print
         
-        # connect to bitcoind over JSON-RPC and do initial getwork
+        # connect to bitcoind over JSON-RPC and do initial getmemorypool
         url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
         print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
         bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
-        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(args.net.BITCOIN_RPC_CHECK)(bitcoind)
+        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.BITCOIN_RPC_CHECK)(bitcoind)
         if not good:
             print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
             return
-        temp_work = yield deferral.retry('Error while testing getwork:', 1)(defer.inlineCallbacks(lambda: defer.returnValue(bitcoin.getwork.BlockAttempt.from_getwork((yield bitcoind.rpc_getwork())))))()
+        temp_work = yield getwork(bitcoind)
         print '    ...success!'
-        print '    Current block hash: %x' % (temp_work.previous_block,)
+        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
         print
         
-        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
+        # connect to bitcoind over bitcoin-p2p
         print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
-        factory = bitcoin.p2p.ClientFactory(args.net)
+        factory = bitcoin_p2p.ClientFactory(net)
         reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
-        my_script = yield get_payout_script(factory)
+        yield factory.getProtocol() # waits until handshake is successful
+        print '    ...success!'
+        print
+        
         if args.pubkey_hash is None:
-            if my_script is None:
-                print '    IP transaction denied ... falling back to sending to address.'
-                my_script = yield get_payout_script2(bitcoind, args.net)
+            print 'Getting payout address from bitcoind...'
+            my_script = yield get_payout_script2(bitcoind, net)
         else:
-            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
+            print 'Computing payout script from provided address...'
+            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
         print '    ...success!'
-        print '    Payout script:', bitcoin.data.script2_to_human(my_script, args.net)
+        print '    Payout script:', bitcoin_data.script2_to_human(my_script, net)
         print
         
         print 'Loading cached block headers...'
-        ht = bitcoin.p2p.HeightTracker(factory, args.net.NAME + '_headers.dat')
+        ht = bitcoin_p2p.HeightTracker(factory, net.NAME + '_headers.dat')
         print '   ...done loading %i cached block headers.' % (len(ht.tracker.shares),)
         print
         
-        tracker = p2pool.OkayTracker(args.net)
-        ss = p2pool.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), args.net.NAME + '_shares.'), args.net)
+        tracker = p2pool_data.OkayTracker(net)
+        shared_share_hashes = set()
+        ss = p2pool_data.ShareStore(os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '_shares.'), net)
         known_verified = set()
         print "Loading shares..."
         for i, (mode, contents) in enumerate(ss.get_shares()):
             if mode == 'share':
                 if contents.hash in tracker.shares:
                     continue
-                contents.shared = True
-                contents.stored = True
+                shared_share_hashes.add(contents.hash)
                 contents.time_seen = 0
                 tracker.add(contents)
                 if len(tracker.shares) % 1000 == 0 and tracker.shares:
@@ -150,6 +129,7 @@ def main(args):
         tracker.verified.added.watch(lambda share: ss.add_verified_hash(share.hash))
         tracker.removed.watch(lambda share: ss.forget_share(share.hash))
         tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
+        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
         
         peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
         
@@ -164,7 +144,7 @@ def main(args):
         
         @defer.inlineCallbacks
         def set_real_work1():
-            work = yield getwork(bitcoind, ht, args.net)
+            work = yield getwork(bitcoind)
             changed = work['previous_block_hash'] != current_work.value['previous_block'] if current_work.value is not None else True
             current_work.set(dict(
                 version=work['version'],
@@ -174,6 +154,7 @@ def main(args):
                 aux_work=current_work.value['aux_work'] if current_work.value is not None else None,
             ))
             current_work2.set(dict(
+                time=work['time'],
                 transactions=work['transactions'],
                 subsidy=work['subsidy'],
                 clock_offset=time.time() - work['time'],
@@ -207,7 +188,7 @@ def main(args):
                     if peer is None:
                         continue
                 
-                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
+                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                 peer.send_getshares(
                     hashes=[share_hash],
                     parents=2000,
@@ -233,7 +214,7 @@ def main(args):
                 x = dict(current_work.value)
                 x['aux_work'] = dict(
                     hash=int(auxblock['hash'], 16),
-                    target=bitcoin.data.HashType().unpack(auxblock['target'].decode('hex')),
+                    target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
                     chain_id=auxblock['chainid'],
                 )
                 #print x['aux_work']
@@ -249,10 +230,10 @@ def main(args):
             for peer in p2p_node.peers.itervalues():
                 if peer is ignore_peer:
                     continue
-                #if p2pool_init.DEBUG:
-                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
+                #if p2pool.DEBUG:
+                #    print "Sending share %s to %r" % (p2pool_data.format_hash(share.hash), peer.addr)
                 peer.sendShares([share])
-            share.flag_shared()
+            shared_share_hashes.add(share.hash)
         
         def p2p_shares(shares, peer=None):
             if len(shares) > 5:
@@ -261,12 +242,12 @@ def main(args):
             new_count = 0
             for share in shares:
                 if share.hash in tracker.shares:
-                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
+                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                     continue
                 
                 new_count += 1
                 
-                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
+                #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                 
                 tracker.add(share)
             
@@ -277,18 +258,18 @@ def main(args):
                 set_real_work2()
             
             if len(shares) > 5:
-                print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*args.net.CHAIN_LENGTH)
+                print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.shares), 2*net.CHAIN_LENGTH)
         
         @tracker.verified.added.watch
         def _(share):
-            if share.bitcoin_hash <= share.header['target']:
-                print
-                print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
-                print
+            if share.pow_hash <= share.header['target']:
                 if factory.conn.value is not None:
-                    factory.conn.value.send_block(block=share.as_block(tracker, args.net))
+                    factory.conn.value.send_block(block=share.as_block(tracker, net))
                 else:
                     print 'No bitcoind connection! Erp!'
+                print
+                print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash,)
+                print
         
         def p2p_share_hashes(share_hashes, peer):
             t = time.time()
@@ -299,7 +280,7 @@ def main(args):
                 last_request_time, count = requested.get(share_hash, (None, 0))
                 if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                     continue
-                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
+                print 'Got share hash, requesting! Hash: %s' % (p2pool_data.format_hash(share_hash),)
                 get_hashes.append(share_hash)
                 requested[share_hash] = t, count + 1
             
@@ -327,31 +308,31 @@ def main(args):
                 ip, port = x.split(':')
                 return ip, int(port)
             else:
-                return x, args.net.P2P_PORT
+                return x, net.P2P_PORT
         
         nodes = set([
-            ('72.14.191.28', args.net.P2P_PORT),
-            ('62.204.197.159', args.net.P2P_PORT),
-            ('142.58.248.28', args.net.P2P_PORT),
-            ('94.23.34.145', args.net.P2P_PORT),
+            ('72.14.191.28', net.P2P_PORT),
+            ('62.204.197.159', net.P2P_PORT),
+            ('142.58.248.28', net.P2P_PORT),
+            ('94.23.34.145', net.P2P_PORT),
         ])
         for host in [
             'p2pool.forre.st',
             'dabuttonfactory.com',
         ]:
             try:
-                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
+                nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
             except:
                 log.err(None, 'Error resolving bootstrap node IP:')
 
-        if args.net_name == 'litecoin':
-            nodes.add(((yield reactor.resolve('liteco.in')), args.net.P2P_PORT))
+        if net.NAME == 'litecoin':
+            nodes.add(((yield reactor.resolve('liteco.in')), net.P2P_PORT))
         
         p2p_node = p2p.Node(
             current_work=current_work,
             port=args.p2pool_port,
-            net=args.net,
-            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.NAME),
+            net=net,
+            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), net.NAME),
             preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
         )
         p2p_node.handle_shares = p2p_shares
@@ -364,7 +345,7 @@ def main(args):
         def work_changed(new_work):
             #print 'Work changed:', new_work
             for share in tracker.get_chain_known(new_work['best_share_hash']):
-                if share.shared:
+                if share.hash in shared_share_hashes:
                     break
                 share_share(share, share.peer)
         current_work.changed.watch(work_changed)
@@ -383,7 +364,7 @@ def main(args):
                 except defer.TimeoutError:
                     pass
                 except:
-                    if p2pool_init.DEBUG:
+                    if p2pool.DEBUG:
                         log.err(None, "UPnP error:")
                 yield deferral.sleep(random.expovariate(1/120))
         
@@ -403,7 +384,7 @@ def main(args):
         removed_unstales = set()
         def get_share_counts(doa=False):
             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
-            matching_in_chain = share_counter(current_work.value['best_share_hash'], max(0, height - 1)) | removed_unstales
+            matching_in_chain = share_counter(current_work.value['best_share_hash'], height) | removed_unstales
             shares_in_chain = my_shares & matching_in_chain
             stale_shares = my_shares - matching_in_chain
             if doa:
@@ -415,19 +396,31 @@ def main(args):
         def _(share):
             if share.hash in my_shares and tracker.is_child_of(share.hash, current_work.value['best_share_hash']):
                 removed_unstales.add(share.hash)
-
-        def compute(state, payout_script):
+        
+        
+        def get_payout_script_from_username(request):
+            user = worker_interface.get_username(request)
+            if user is None:
+                return None
+            try:
+                return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
+            except: # XXX blah
+                return None
+        
+        def compute(request):
+            state = current_work.value
+            payout_script = get_payout_script_from_username(request)
             if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                 payout_script = my_script
-            if state['best_share_hash'] is None and args.net.PERSIST:
+            if state['best_share_hash'] is None and net.PERSIST:
                 raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
-            if len(p2p_node.peers) == 0 and args.net.PERSIST:
+            if len(p2p_node.peers) == 0 and net.PERSIST:
                 raise jsonrpc.Error(-12345, u'p2pool is not connected to any peers')
             if time.time() > current_work2.value['last_update'] + 60:
                 raise jsonrpc.Error(-12345, u'lost contact with bitcoind')
             
             if state['aux_work'] is not None:
-                aux_str = '\xfa\xbemm' + bitcoin.data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
+                aux_str = '\xfa\xbemm' + bitcoin_data.HashType().pack(state['aux_work']['hash'])[::-1] + struct.pack('<ii', 1, 0)
             else:
                 aux_str = ''
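
The aux_str built above is the merged-mining commitment that aux chains look for in the coinbase: the 4-byte magic '\xfa\xbemm', the aux block hash (byte-reversed), then a merkle size of 1 and a nonce of 0 per the merged-mining convention. A struct-only sketch of its layout with a dummy hash:

    import struct

    aux_hash_bytes = b'\x11' * 32                 # dummy aux block hash, already byte-reversed
    aux_str = b'\xfa\xbemm' + aux_hash_bytes + struct.pack('<ii', 1, 0)

    print(len(aux_str))   # 44: 4-byte magic + 32-byte hash + two 4-byte little-endian ints
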
             
@@ -439,74 +432,76 @@ def main(args):
                 frac = stale_shares/shares
                 return 2*struct.pack('<H', int(65535*frac + .5))
             subsidy = current_work2.value['subsidy']
-            generate_tx = p2pool.generate_transaction(
+            
+            
+            timestamp = current_work2.value['time']
+            previous_share = tracker.shares[state['best_share_hash']] if state['best_share_hash'] is not None else None
+            new_share_info, generate_tx = p2pool_data.new_generate_transaction(
                 tracker=tracker,
-                previous_share_hash=state['best_share_hash'],
-                new_script=payout_script,
-                subsidy=subsidy,
-                nonce=run_identifier + struct.pack('<H', random.randrange(2**16)) + aux_str + get_stale_frac(),
+                new_share_data=dict(
+                    previous_share_hash=state['best_share_hash'],
+                    coinbase=aux_str,
+                    nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
+                    new_script=payout_script,
+                    subsidy=subsidy,
+                    donation=math.perfect_round(65535*args.donation_percentage/100),
+                    stale_frac=(lambda shares, stales:
+                        255 if shares == 0 else math.perfect_round(254*stales/shares)
+                    )(*get_share_counts()),
+                ),
                 block_target=state['target'],
-                net=args.net,
+                desired_timestamp=int(time.time() - current_work2.value['clock_offset']),
+                net=net,
             )
-            print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], (generate_tx['tx_outs'][-1]['value']-subsidy//200)*1e-8, args.net.BITCOIN_SYMBOL, subsidy*1e-8, args.net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
-            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
+            
+            print 'New work for worker! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (bitcoin_data.target_to_difficulty(new_share_info['target']), (sum(t['value'] for t in generate_tx['tx_outs'] if t['script'] == payout_script) -subsidy//200)*1e-8, net.BITCOIN_SYMBOL, subsidy*1e-8, net.BITCOIN_SYMBOL, len(current_work2.value['transactions']))
+            #print 'Target: %x' % (p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
             #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
             transactions = [generate_tx] + list(current_work2.value['transactions'])
-            merkle_root = bitcoin.data.merkle_hash(transactions)
-            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
+            merkle_root = bitcoin_data.merkle_hash(transactions)
+            merkle_root_to_transactions[merkle_root] = new_share_info, transactions
             
-            timestamp = int(time.time() - current_work2.value['clock_offset'])
-            if state['best_share_hash'] is not None:
-                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
-                if timestamp2 > timestamp:
-                    print 'Toff', timestamp2 - timestamp
-                    timestamp = timestamp2
-            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
-            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
-            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
-            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
+            target2 = new_share_info['target']
+            times[merkle_root] = time.time()
+            #print 'SENT', 2**256//p2pool_data.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
+            return bitcoin_getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2), state['best_share_hash']
         
         my_shares = set()
         doa_shares = set()
         times = {}
         
-        def got_response(data, user):
+        def got_response(header, request):
             try:
+                user = worker_interface.get_username(request)
                 # match up with transactions
-                header = bitcoin.getwork.decode_data(data)
-                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
-                if transactions is None:
+                share_info_and_txs = merkle_root_to_transactions.get(header['merkle_root'], None)
+                if share_info_and_txs is None:
                     print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                     return False
-                block = dict(header=header, txs=transactions)
-                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
-                pow = hash_;
-
-                # use scrypt for Litecoin
-                if (getattr(args.net, 'BITCOIN_POW_SCRYPT', False)):
-                    pow = bitcoin.data.block_header_type.scrypt(block['header']);
-#                    print 'LTC: hash256 %x' % hash_
-#                    print 'LTC: scrypt  %x' % pow
-#                    print 'LTC: target  %x' % block['header']['target']
-#                    print 'LTC: starget %x' % p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
-
-                if pow <= block['header']['target'] or p2pool_init.DEBUG:
+                new_share_info, transactions = share_info_and_txs
+                
+                hash_ = bitcoin_data.block_header_type.hash256(header)
+                
+                pow_hash = net.BITCOIN_POW_FUNC(header)
+                
+                if pow_hash <= header['target'] or p2pool.DEBUG:
                     if factory.conn.value is not None:
-                        factory.conn.value.send_block(block=block)
+                        factory.conn.value.send_block(block=dict(header=header, txs=transactions))
                     else:
                         print 'No bitcoind connection! Erp!'
-                    if pow <= block['header']['target']:
+                    if pow_hash <= header['target']:
                         print
                         print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                         print
                 
-                if current_work.value['aux_work'] is not None and pow <= current_work.value['aux_work']['target']:
+                if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
                     try:
                         aux_pow = dict(
                             merkle_tx=dict(
                                 tx=transactions[0],
                                 block_hash=hash_,
-                                merkle_branch=[x['hash'] for x in p2pool.calculate_merkle_branch(transactions, 0)],
+                                merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
                                 index=0,
                             ),
                             merkle_branch=[],
@@ -514,7 +509,7 @@ def main(args):
                             parent_block_header=header,
                         )
                         
-                        a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin.data.aux_pow_type.pack(aux_pow).encode('hex')
+                        a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
                         #print a, b
                         merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                         def _(res):
@@ -523,15 +518,15 @@ def main(args):
                     except:
                         log.err(None, 'Error while processing merged mining POW:')
                 
-                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
-                if pow > target:
-                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow, target)
+                target = new_share_info['target']
+                if pow_hash > target:
+                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
                     return False
-                share = p2pool.Share.from_block(block, args.net)
+                share = p2pool_data.NewShare(net, header, new_share_info, other_txs=transactions[1:])
                 my_shares.add(share.hash)
                 if share.previous_hash != current_work.value['best_share_hash']:
                     doa_shares.add(share.hash)
-                print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
+                print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - times[header['merkle_root']]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                 good = share.previous_hash == current_work.value['best_share_hash']
                 # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                 p2p_shares([share])
@@ -541,22 +536,22 @@ def main(args):
                 log.err(None, 'Error processing data received from worker:')
                 return False
         
-        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
+        web_root = worker_interface.WorkerInterface(compute, got_response, current_work.changed)
         
         def get_rate():
             if current_work.value['best_share_hash'] is not None:
                 height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
-                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
-                fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
+                att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], net, min(height - 1, 720))
+                fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
                 return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
             return json.dumps(None)
         
         def get_users():
             height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
-            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
+            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
             res = {}
             for script in sorted(weights, key=lambda s: weights[s]):
-                res[bitcoin.data.script2_to_human(script, args.net)] = weights[script]/total_weight
+                res[bitcoin_data.script2_to_human(script, net)] = weights[script]/total_weight
             return json.dumps(res)
         
         class WebInterface(resource.Resource):
@@ -623,14 +618,6 @@ def main(args):
             task.LoopingCall(signal.alarm, 30).start(1)
         
         
-        def read_stale_frac(share):
-            if len(share.nonce) < 4:
-                return None
-            a, b = struct.unpack("<HH", share.nonce[-4:])
-            if a == 0 or a != b:
-                return None
-            return a/65535
-
         pool_str = None;
         while True:
             yield deferral.sleep(3)
@@ -640,11 +627,11 @@ def main(args):
                 if current_work.value['best_share_hash'] is not None:
                     height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                     if height > 2:
-                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height - 1, 720))
-                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**100)
+                        att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], net, min(height - 1, 720))
+                        weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                         shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
                         stale_shares = stale_doa_shares + stale_not_doa_shares
-                        fracs = [read_stale_frac(share) for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if read_stale_frac(share) is not None]
+                        fracs = [share.stale_frac for share in itertools.islice(tracker.get_chain_known(current_work.value['best_share_hash']), 120) if share.stale_frac is not None]
                         str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
                             math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
                             height,
@@ -656,7 +643,7 @@ def main(args):
                             stale_not_doa_shares,
                             stale_doa_shares,
                             len(p2p_node.peers),
-                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool_init.DEBUG else '')
+                        ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                         if (str != pool_str):
                             print str;
                             pool_str = str;
@@ -710,11 +697,11 @@ def run():
         def convert_arg_line_to_args(self, arg_line):
             return [arg for arg in arg_line.split() if arg.strip()]
     
-    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool_init.__version__,), fromfile_prefix_chars='@')
-    parser.add_argument('--version', action='version', version=p2pool_init.__version__)
+    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
+    parser.add_argument('--version', action='version', version=p2pool.__version__)
     parser.add_argument('--net',
         help='use specified network (default: bitcoin)',
-        action='store', choices=sorted(x for x in p2pool.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
+        action='store', choices=sorted(x for x in p2pool_data.nets if 'testnet' not in x), default='bitcoin', dest='net_name')
     parser.add_argument('--testnet',
         help='''use the network's testnet''',
         action='store_const', const=True, default=False, dest='testnet')
@@ -733,6 +720,9 @@ def run():
     parser.add_argument('--merged-userpass',
         help='merge daemon user and password, separated by a colon. Example: ncuser:ncpass',
         type=str, action='store', default=None, dest='merged_userpass')
+    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
+        help='percentage of earnings to donate to the author of p2pool (default: 0.5)',
+        type=float, action='store', default=0.5, dest='donation_percentage')
     
     p2pool_group = parser.add_argument_group('p2pool interface')
     p2pool_group.add_argument('--p2pool-port', metavar='PORT',
@@ -758,7 +748,7 @@ def run():
         help='connect to a bitcoind at this address (default: 127.0.0.1)',
         type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
     bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
-        help='connect to a bitcoind at this port over the RPC interface - used to get the current highest block via getwork (default: 8332 ixcoin: 8338 i0coin: 7332 litecoin: 9332)',
+        help='connect to a bitcoind at this port over the RPC interface - used to get block templates via getmemorypool (default: 8332 ixcoin: 8338 i0coin: 7332 litecoin: 9332)',
         type=int, action='store', default=None, dest='bitcoind_rpc_port')
     bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
         help='connect to a bitcoind at this port over the p2p interface - used to submit blocks and get the pubkey to generate to via an IP transaction (default: 8333 namecoin: 8334 ixcoin: 8337 i0coin: 7333 solidcoin: 7555 litecoin: 9333, +10000 for testnets)',
@@ -774,10 +764,12 @@ def run():
     args = parser.parse_args()
     
     if args.debug:
-        p2pool_init.DEBUG = True
+        p2pool.DEBUG = True
+    
+    net = p2pool_data.nets[args.net_name + ('_testnet' if args.testnet else '')]
     
     if args.logfile is None:
-        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), args.net_name + ('_testnet' if args.testnet else '') + '.log')
+        args.logfile = os.path.join(os.path.dirname(sys.argv[0]), net.NAME + '.log')
     
     class LogFile(object):
         def __init__(self, filename):
@@ -839,23 +831,21 @@ def run():
         signal.signal(signal.SIGUSR1, sigusr1)
     task.LoopingCall(logfile.reopen).start(5)
     
-    args.net = p2pool.nets[args.net_name + ('_testnet' if args.testnet else '')]
-    
     if args.bitcoind_rpc_port is None:
-        args.bitcoind_rpc_port = args.net.BITCOIN_RPC_PORT
+        args.bitcoind_rpc_port = net.BITCOIN_RPC_PORT
     
     if args.bitcoind_p2p_port is None:
-        args.bitcoind_p2p_port = args.net.BITCOIN_P2P_PORT
+        args.bitcoind_p2p_port = net.BITCOIN_P2P_PORT
     
     if args.p2pool_port is None:
-        args.p2pool_port = args.net.P2P_PORT
+        args.p2pool_port = net.P2P_PORT
     
     if args.worker_port is None:
-        args.worker_port = args.net.WORKER_PORT
+        args.worker_port = net.WORKER_PORT
     
     if args.address is not None:
         try:
-            args.pubkey_hash = bitcoin.data.address_to_pubkey_hash(args.address, args.net)
+            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net)
         except Exception, e:
             parser.error('error parsing address: ' + repr(e))
     else:
@@ -864,5 +854,5 @@ def run():
     if (args.merged_url is None) ^ (args.merged_userpass is None):
         parser.error('must specify --merged-url and --merged-userpass')
     
-    reactor.callWhenRunning(main, args)
+    reactor.callWhenRunning(main, args, net)
     reactor.run()
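
The merged-mining tag that compute() prepends to the coinbase above (the aux_str expression) packs four magic bytes, the aux chain's block hash in reversed byte order, and two little-endian int32 fields for the aux merkle tree size and nonce. A minimal standalone sketch of that byte layout; the helper name and the all-zero example hash are mine, only the layout itself comes from the patch:

    import struct

    MERGED_MINING_MAGIC = b'\xfa\xbe' + b'mm'

    def merged_mining_tag(aux_block_hash_be, merkle_size=1, merkle_nonce=0):
        # the 32-byte aux block hash is embedded reversed (little-endian),
        # followed by the aux merkle tree size and nonce as '<ii'
        return (MERGED_MINING_MAGIC
                + aux_block_hash_be[::-1]
                + struct.pack('<ii', merkle_size, merkle_nonce))

    tag = merged_mining_tag(b'\x00' * 32)
    assert len(tag) == 4 + 32 + 8
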
diff --git a/p2pool/p2p.py b/p2pool/p2p.py
index 1124e45..8b95081 100644
@@ -181,64 +181,30 @@ class Protocol(bitcoin_p2p.BaseProtocol):
     def handle_getshares(self, hashes, parents, stops):
         self.node.handle_get_shares(hashes, parents, stops, self)
     
-    message_share1as = bitcoin_data.ComposedType([
-        ('share1as', bitcoin_data.ListType(p2pool_data.share1a_type)),
+    message_shares = bitcoin_data.ComposedType([
+        ('shares', bitcoin_data.ListType(p2pool_data.new_share_type)),
     ])
-    def handle_share1as(self, share1as):
-        shares = []
-        for share1a in share1as:
-            # use scrypt for Litecoin
-            if (getattr(self.node.net, 'BITCOIN_POW_SCRYPT', False)):
-                hash_ = bitcoin_data.block_header_type.scrypt(share1a['header']);
-            else:
-                hash_ = bitcoin_data.block_header_type.hash256(share1a['header'])
-            if hash_ <= share1a['header']['target']:
-                print 'Dropping peer %s:%i due to invalid share' % self.addr
-                self.transport.loseConnection()
-                return
-            share = p2pool_data.Share.from_share1a(share1a, self.node.net)
-            share.peer = self # XXX
-            shares.append(share)
-        self.node.handle_shares(shares, self)
-    
-    message_share1bs = bitcoin_data.ComposedType([
-        ('share1bs', bitcoin_data.ListType(p2pool_data.share1b_type)),
-    ])
-    def handle_share1bs(self, share1bs):
-        shares = []
-        for share1b in share1bs:
-            # use scrypt for Litecoin
-            if (getattr(self.node.net, 'BITCOIN_POW_SCRYPT', False)):
-                hash_ = bitcoin_data.block_header_type.scrypt(share1b['header']);
-            else:
-                hash_ = bitcoin_data.block_header_type.hash256(share1b['header'])
-            if not hash_ <= share1b['header']['target']:
-                print 'Dropping peer %s:%i due to invalid share' % self.addr
-                self.transport.loseConnection()
-                return
-            share = p2pool_data.Share.from_share1b(share1b, self.node.net)
-            share.peer = self # XXX
-            shares.append(share)
-        self.node.handle_shares(shares, self)
-    
-    def sendShares(self, shares):
-        share1bs = []
-        share1as = []
+    def handle_shares(self, shares):
+        res = []
+        for share in shares:
+            share_obj = p2pool_data.NewShare.from_share(share, self.node.net)
+            share_obj.peer = self
+            res.append(share_obj)
+        self.node.handle_shares(res)
+    
+    def sendShares(self, shares, full=False):
+        new_shares = []
         # XXX doesn't need to send full block when it's not urgent
         # eg. when getting history
         for share in shares:
-            if share.bitcoin_hash <= share.header['target']:
-                share1bs.append(share.as_share1b())
-            else:
-                share1as.append(share.as_share1a())
+            new_shares.append(share.as_share())
         def att(f, **kwargs):
             try:
                 f(**kwargs)
             except bitcoin_p2p.TooLong:
                 att(f, **dict((k, v[:len(v)//2]) for k, v in kwargs.iteritems()))
                 att(f, **dict((k, v[len(v)//2:]) for k, v in kwargs.iteritems()))
-        if share1bs: att(self.send_share1bs, share1bs=share1bs)
-        if share1as: att(self.send_share1as, share1as=share1as)
+        if new_shares: att(self.send_shares, shares=new_shares)
     
     def connectionLost(self, reason):
         if self.connected2:
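
sendShares above relies on the small att() helper: if bitcoin_p2p raises TooLong because the shares message exceeds the protocol's size limit, the argument list is split in half and each half is retried, recursing until every piece fits. A standalone sketch of that strategy with a fake size limit; TooLong, MAX_ITEMS and send_shares here are stand-ins, and note that a single over-sized item would recurse forever:

    class TooLong(Exception):
        pass

    MAX_ITEMS = 4  # stand-in for the real message size limit

    def send_shares(shares):
        if len(shares) > MAX_ITEMS:
            raise TooLong()
        print('sent %i shares' % len(shares))

    def att(f, **kwargs):
        try:
            f(**kwargs)
        except TooLong:
            # halve every keyword argument and retry each half
            att(f, **dict((k, v[:len(v)//2]) for k, v in kwargs.items()))
            att(f, **dict((k, v[len(v)//2:]) for k, v in kwargs.items()))

    att(send_shares, shares=list(range(10)))  # sends 2, 3, 2, 3 shares
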
diff --git a/p2pool/skiplists.py b/p2pool/skiplists.py
index 9e62188..b749119 100644
@@ -13,26 +13,28 @@ class WeightsSkipList(skiplist.SkipList):
     def get_delta(self, element):
         from p2pool.bitcoin import data as bitcoin_data
         if element is None:
-            return (2**256, {}, 0) # XXX
+            return (2**256, {}, 0, 0) # XXX
         share = self.tracker.shares[element]
         att = bitcoin_data.target_to_average_attempts(share.target)
-        return 1, {share.new_script: att}, att
+        return 1, {share.new_script: att*(65535-share.donation)}, att*65535, att*share.donation
     
-    def combine_deltas(self, (share_count1, weights1, total_weight1), (share_count2, weights2, total_weight2)):
-        return share_count1 + share_count2, math.add_dicts([weights1, weights2]), total_weight1 + total_weight2
+    def combine_deltas(self, (share_count1, weights1, total_weight1, total_donation_weight1), (share_count2, weights2, total_weight2, total_donation_weight2)):
+        return share_count1 + share_count2, math.add_dicts([weights1, weights2]), total_weight1 + total_weight2, total_donation_weight1 + total_donation_weight2
     
     def initial_solution(self, start, (max_shares, desired_weight)):
-        return 0, {}, 0
+        assert desired_weight % 65535 == 0, divmod(desired_weight, 65535)
+        return 0, {}, 0, 0
     
-    def apply_delta(self, (share_count1, weights1, total_weight1), (share_count2, weights2, total_weight2), (max_shares, desired_weight)):
-        if total_weight1 + total_weight2 > desired_weight and len(weights2) == 1:
+    def apply_delta(self, (share_count1, weights1, total_weight1, total_donation_weight1), (share_count2, weights2, total_weight2, total_donation_weight2), (max_shares, desired_weight)):
+        if total_weight1 + total_weight2 > desired_weight and share_count2 == 1:
             script, = weights2.iterkeys()
             new_weights = dict(weights1)
-            new_weights[script] = new_weights.get(script, 0) + desired_weight - total_weight1
-            return share_count1 + share_count2, new_weights, desired_weight
-        return share_count1 + share_count2, math.add_dicts([weights1, weights2]), total_weight1 + total_weight2
+            assert (desired_weight - total_weight1) % 65535 == 0
+            new_weights[script] = new_weights.get(script, 0) + (desired_weight - total_weight1)//65535*weights2[script]//(total_weight2//65535)
+            return share_count1 + share_count2, new_weights, desired_weight, total_donation_weight1 + (desired_weight - total_weight1)//65535*total_donation_weight2//(total_weight2//65535)
+        return share_count1 + share_count2, math.add_dicts([weights1, weights2]), total_weight1 + total_weight2, total_donation_weight1 + total_donation_weight2
     
-    def judge(self, (share_count, weights, total_weight), (max_shares, desired_weight)):
+    def judge(self, (share_count, weights, total_weight, total_donation_weight), (max_shares, desired_weight)):
         if share_count > max_shares or total_weight > desired_weight:
             return 1
         elif share_count == max_shares or total_weight == desired_weight:
@@ -40,8 +42,8 @@ class WeightsSkipList(skiplist.SkipList):
         else:
             return -1
     
-    def finalize(self, (share_count, weights, total_weight)):
-        return weights, total_weight
+    def finalize(self, (share_count, weights, total_weight, total_donation_weight)):
+        return weights, total_weight, total_donation_weight
 
 class CountsSkipList(skiplist.SkipList):
     # share_count, counts, total_count
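
The four-element deltas above carry a 16-bit fixed-point donation split: each share adds attempts*(65535 - donation) to its own script's weight, attempts*65535 to the total weight, and attempts*donation to the donation weight, so everything stays in integers until the final payout division. A toy recomputation of a single share's delta (not the skiplist machinery itself); the script name and numbers are made up:

    from __future__ import division

    def share_delta(script, attempts, donation):
        # donation is the share's 0..65535 fixed-point donation field
        weights = {script: attempts * (65535 - donation)}
        total_weight = attempts * 65535
        donation_weight = attempts * donation
        return 1, weights, total_weight, donation_weight

    count, weights, total, don = share_delta('scriptA', attempts=1000, donation=328)
    # the script receives everything except the donated fraction
    assert abs(weights['scriptA'] / total - (1 - 328 / 65535)) < 1e-12
    assert abs(don / total - 328 / 65535) < 1e-12
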
diff --git a/p2pool/util/jsonrpc.py b/p2pool/util/jsonrpc.py
index 6d63a0a..a70808f 100644
@@ -117,11 +117,8 @@ class Server(deferred_resource.DeferredResource):
                 raise Error(-32601, u'Method not found')
             method_meth = getattr(self, method_name)
             
-            if hasattr(method_meth, 'takes_request'):
-                params = [request] + list(params)
-            
             try:
-                result = yield method_meth(*params)
+                result = yield method_meth(request, *params)
             except Error:
                 raise
             except Exception:
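
The jsonrpc change above drops the takes_request opt-in: every RPC method is now called with the HTTP request as its first positional argument. A standalone sketch of that calling convention with made-up names; FakeRequest stands in for the Twisted request object, and the rpc_ prefix on the handler is an assumption for the sketch:

    class FakeRequest(object):
        def __init__(self, headers):
            self.headers = headers
        def getHeader(self, name):
            return self.headers.get(name.lower())

    class Handler(object):
        def rpc_echo_agent(self, request, suffix=''):
            # the request is always available, e.g. for reading headers
            return (request.getHeader('user-agent') or 'unknown') + suffix
        def dispatch(self, request, method, params):
            # request is passed unconditionally; no takes_request marker needed
            return getattr(self, 'rpc_' + method)(request, *params)

    h = Handler()
    print(h.dispatch(FakeRequest({'user-agent': 'p2pool-test'}), 'echo_agent', ['!']))
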
diff --git a/p2pool/util/math.py b/p2pool/util/math.py
index 6f7e62f..a1a6794 100644
@@ -64,6 +64,14 @@ def format(x):
     s = '' if count == 0 else prefixes[count - 1]
     return '%i' % (x,) + s
 
+def perfect_round(x):
+    a, b = divmod(x, 1)
+    a2 = int(a)
+    if random.random() >= b:
+        return a2
+    else:
+        return a2 + 1
+
 if __name__ == '__main__':
     import random
     a = 1
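
perfect_round above is an unbiased randomized rounding: a value with fractional part b is rounded up with probability b, so the expected result equals the input, which is presumably why main.py feeds the stale_frac and donation fields through it rather than truncating. A quick standalone check of the expectation property (the trial count and tolerance are arbitrary):

    from __future__ import division
    import random

    def perfect_round(x):
        a, b = divmod(x, 1)
        a2 = int(a)
        return a2 if random.random() >= b else a2 + 1

    trials = 100000
    x = 3.25
    mean = sum(perfect_round(x) for _ in range(trials)) / trials
    assert abs(mean - x) < 0.05  # converges to x, i.e. no systematic bias
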
diff --git a/p2pool/util/variable.py b/p2pool/util/variable.py
index 03e7e78..3e72e3d 100644
@@ -8,6 +8,7 @@ class Event(object):
         self.observers = {}
         self.id_generator = itertools.count()
         self._once = None
+        self.times = 0
     
     def watch(self, func):
         id = self.id_generator.next()
@@ -34,6 +35,8 @@ class Event(object):
         
         if once is not None:
             once.happened(*event)
+        
+        self.times += 1
     
     def get_deferred(self, timeout=None):
         once = self.once