fixed natural_to_string's encoding of 0
author Forrest Voight <forrest@forre.st>
Wed, 8 Aug 2012 06:11:43 +0000 (02:11 -0400)
committer Forrest Voight <forrest@forre.st>
Wed, 8 Aug 2012 14:24:07 +0000 (10:24 -0400)
p2pool/test/util/test_math.py
p2pool/util/math.py

diff --git a/p2pool/test/util/test_math.py b/p2pool/test/util/test_math.py
index d81d9a0..226b8e5 100644
--- a/p2pool/test/util/test_math.py
+++ b/p2pool/test/util/test_math.py
@@ -21,7 +21,11 @@ class Test(unittest.TestCase):
         for i in xrange(10):
             alphabet = generate_alphabet()
             for i in xrange(100):
-                n = random.randrange(100000000000000000000000000000)
+                n = random.choice([
+                    random.randrange(3),
+                    random.randrange(300),
+                    random.randrange(100000000000000000000000000000),
+                ])
                 s = math.natural_to_string(n, alphabet)
                 n2 = math.string_to_natural(s, alphabet)
                 #print n, s.encode('hex'), n2
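
With the widened ranges, n = 0 is now drawn regularly (random.randrange(3) returns 0 about a third of the time), so the round trip covers the zero encoding as well. A minimal standalone sketch of that property, assuming the loop ends by asserting that n and n2 are equal (the real test also runs it against randomly generated alphabets):

    # Round-trip property the test exercises, shown here only for the
    # default hex/byte-string encoding (Python 2, as in the test itself).
    from p2pool.util import math

    alphabet = None  # default encoding, as in the diff below
    for n in (0, 1, 2, 255, 256, 10**30):
        s = math.natural_to_string(n, alphabet)
        assert math.string_to_natural(s, alphabet) == n, (n, s)
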
diff --git a/p2pool/util/math.py b/p2pool/util/math.py
index c13494f..c45d1ec 100644
--- a/p2pool/util/math.py
+++ b/p2pool/util/math.py
@@ -179,7 +179,7 @@ def natural_to_string(n, alphabet=None):
     if n < 0:
         raise TypeError('n must be a natural')
     if alphabet is None:
-        s = '%x' % (n,)
+        s = ('%x' % (n,)).lstrip('0')
         if len(s) % 2:
             s = '0' + s
         return s.decode('hex')
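
For illustration, a minimal Python 2 sketch (not the p2pool module itself) of the default-alphabet branch before and after this change: '%x' % 0 yields '0', which the old code padded to '00' and decoded to a single zero byte, so 0 was the one natural whose encoding picked up a spurious leading byte. Stripping leading zeros makes it encode to the empty string, while nonzero values are unaffected.

    def natural_to_string_old(n):
        # pre-fix behaviour: 0 -> '0' -> '00' -> '\x00'
        if n < 0:
            raise TypeError('n must be a natural')
        s = '%x' % (n,)
        if len(s) % 2:
            s = '0' + s
        return s.decode('hex')

    def natural_to_string_new(n):
        # post-fix behaviour: 0 -> '' -> '' (empty string, canonical)
        if n < 0:
            raise TypeError('n must be a natural')
        s = ('%x' % (n,)).lstrip('0')
        if len(s) % 2:
            s = '0' + s
        return s.decode('hex')

    print repr(natural_to_string_old(0))    # '\x00'
    print repr(natural_to_string_new(0))    # ''
    print repr(natural_to_string_new(255))  # '\xff', nonzero values unchanged
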