author     Determinant <[email protected]>  2020-11-17 18:47:40 -0500
committer  Determinant <[email protected]>  2020-11-17 18:47:40 -0500
commit     3bef51eec2299403467e621ae660cef3f9256ac8 (patch)
tree       9b72aaa95fb6dffacd8b20164699870a32ab6825 /frozen_deps/ecdsa
parent     92b8b8e9628cac41d37226416107adc76b10e01b (diff)
update frozen deps
Diffstat (limited to 'frozen_deps/ecdsa')
-rw-r--r--  frozen_deps/ecdsa/__init__.py            |   76
-rw-r--r--  frozen_deps/ecdsa/_compat.py             |   60
-rw-r--r--  frozen_deps/ecdsa/_rwlock.py             |   86
-rw-r--r--  frozen_deps/ecdsa/_version.py            |   21
-rw-r--r--  frozen_deps/ecdsa/curves.py              |  194
-rw-r--r--  frozen_deps/ecdsa/der.py                 |  409
-rw-r--r--  frozen_deps/ecdsa/ecdh.py                |  327
-rw-r--r--  frozen_deps/ecdsa/ecdsa.py               |  754
-rw-r--r--  frozen_deps/ecdsa/ellipticcurve.py       |  847
-rw-r--r--  frozen_deps/ecdsa/keys.py                | 1453
-rw-r--r--  frozen_deps/ecdsa/numbertheory.py        |  810
-rw-r--r--  frozen_deps/ecdsa/rfc6979.py             |  110
-rw-r--r--  frozen_deps/ecdsa/test_der.py            |  397
-rw-r--r--  frozen_deps/ecdsa/test_ecdh.py           |  370
-rw-r--r--  frozen_deps/ecdsa/test_ecdsa.py          |  538
-rw-r--r--  frozen_deps/ecdsa/test_ellipticcurve.py  |  200
-rw-r--r--  frozen_deps/ecdsa/test_jacobi.py         |  438
-rw-r--r--  frozen_deps/ecdsa/test_keys.py           |  454
-rw-r--r--  frozen_deps/ecdsa/test_malformed_sigs.py |  350
-rw-r--r--  frozen_deps/ecdsa/test_numbertheory.py   |  326
-rw-r--r--  frozen_deps/ecdsa/test_pyecdsa.py        | 1989
-rw-r--r--  frozen_deps/ecdsa/test_rw_lock.py        |  177
-rw-r--r--  frozen_deps/ecdsa/util.py                |  435
23 files changed, 10821 insertions, 0 deletions
diff --git a/frozen_deps/ecdsa/__init__.py b/frozen_deps/ecdsa/__init__.py
new file mode 100644
index 0000000..4ae0a11
--- /dev/null
+++ b/frozen_deps/ecdsa/__init__.py
@@ -0,0 +1,76 @@
+from .keys import (
+ SigningKey,
+ VerifyingKey,
+ BadSignatureError,
+ BadDigestError,
+ MalformedPointError,
+)
+from .curves import (
+ NIST192p,
+ NIST224p,
+ NIST256p,
+ NIST384p,
+ NIST521p,
+ SECP256k1,
+ BRAINPOOLP160r1,
+ BRAINPOOLP192r1,
+ BRAINPOOLP224r1,
+ BRAINPOOLP256r1,
+ BRAINPOOLP320r1,
+ BRAINPOOLP384r1,
+ BRAINPOOLP512r1,
+)
+from .ecdh import (
+ ECDH,
+ NoKeyError,
+ NoCurveError,
+ InvalidCurveError,
+ InvalidSharedSecretError,
+)
+from .der import UnexpectedDER
+
+# This code comes from http://github.com/warner/python-ecdsa
+from ._version import get_versions
+
+__version__ = get_versions()["version"]
+del get_versions
+
+__all__ = [
+ "curves",
+ "der",
+ "ecdsa",
+ "ellipticcurve",
+ "keys",
+ "numbertheory",
+ "test_pyecdsa",
+ "util",
+ "six",
+]
+
+_hush_pyflakes = [
+ SigningKey,
+ VerifyingKey,
+ BadSignatureError,
+ BadDigestError,
+ MalformedPointError,
+ UnexpectedDER,
+ InvalidCurveError,
+ NoKeyError,
+ InvalidSharedSecretError,
+ ECDH,
+ NoCurveError,
+ NIST192p,
+ NIST224p,
+ NIST256p,
+ NIST384p,
+ NIST521p,
+ SECP256k1,
+ BRAINPOOLP160r1,
+ BRAINPOOLP192r1,
+ BRAINPOOLP224r1,
+ BRAINPOOLP256r1,
+ BRAINPOOLP320r1,
+ BRAINPOOLP384r1,
+ BRAINPOOLP512r1,
+]
+del _hush_pyflakes
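A minimal signing round trip using the exports above; a sketch only, assuming the vendored package is importable as `ecdsa` (here it lives under frozen_deps) and using an arbitrary placeholder message:

from ecdsa import SigningKey, NIST256p, BadSignatureError

# generate a fresh keypair on NIST P-256 and sign a message
sk = SigningKey.generate(curve=NIST256p)
vk = sk.get_verifying_key()
signature = sk.sign(b"frozen deps test message")

# verify() returns True on success and raises BadSignatureError otherwise
assert vk.verify(signature, b"frozen deps test message")
try:
    vk.verify(signature, b"tampered message")
except BadSignatureError:
    pass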
diff --git a/frozen_deps/ecdsa/_compat.py b/frozen_deps/ecdsa/_compat.py
new file mode 100644
index 0000000..720360b
--- /dev/null
+++ b/frozen_deps/ecdsa/_compat.py
@@ -0,0 +1,60 @@
+"""
+Common functions for providing cross-python version compatibility.
+"""
+import sys
+import re
+from six import integer_types
+
+
+def str_idx_as_int(string, index):
+ """Take index'th byte from string, return as integer"""
+ val = string[index]
+ if isinstance(val, integer_types):
+ return val
+ return ord(val)
+
+
+if sys.version_info < (3, 0):
+
+ def normalise_bytes(buffer_object):
+ """Cast the input into array of bytes."""
+ # flake8 runs on py3 where `buffer` indeed doesn't exist...
+ return buffer(buffer_object) # noqa: F821
+
+ def hmac_compat(ret):
+ return ret
+
+ if sys.version_info < (2, 7) or sys.version_info < (2, 7, 4):
+
+ def remove_whitespace(text):
+ """Removes all whitespace from passed in string"""
+ return re.sub(r"\s+", "", text)
+
+ else:
+
+ def remove_whitespace(text):
+ """Removes all whitespace from passed in string"""
+ return re.sub(r"\s+", "", text, flags=re.UNICODE)
+
+
+else:
+ if sys.version_info < (3, 4):
+ # on python 3.3 hmac.hmac.update() accepts only bytes, on newer
+ # versions it does accept memoryview() also
+ def hmac_compat(data):
+ if not isinstance(data, bytes):
+ return bytes(data)
+ return data
+
+ else:
+
+ def hmac_compat(data):
+ return data
+
+ def normalise_bytes(buffer_object):
+ """Cast the input into array of bytes."""
+ return memoryview(buffer_object).cast("B")
+
+ def remove_whitespace(text):
+ """Removes all whitespace from passed in string"""
+ return re.sub(r"\s+", "", text, flags=re.UNICODE)
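A small sketch of how these compatibility helpers behave on Python 3; the sample byte and hex strings are arbitrary:

from ecdsa._compat import normalise_bytes, str_idx_as_int, remove_whitespace

buf = normalise_bytes(b"\x01\x02\x03")           # memoryview cast to "B" on py3
assert str_idx_as_int(b"\x01\x02\x03", 0) == 1   # byte at index, as an int
assert remove_whitespace(" de ad\nbe ef ") == "deadbeef"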
diff --git a/frozen_deps/ecdsa/_rwlock.py b/frozen_deps/ecdsa/_rwlock.py
new file mode 100644
index 0000000..010e498
--- /dev/null
+++ b/frozen_deps/ecdsa/_rwlock.py
@@ -0,0 +1,86 @@
+# Copyright Mateusz Kobos, (c) 2011
+# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/
+# released under the MIT licence
+
+import threading
+
+
+__author__ = "Mateusz Kobos"
+
+
+class RWLock:
+ """
+ Read-Write locking primitive
+
+ Synchronization object used in a solution of so-called second
+ readers-writers problem. In this problem, many readers can simultaneously
+ access a share, and a writer has an exclusive access to this share.
+ Additionally, the following constraints should be met:
+ 1) no reader should be kept waiting if the share is currently opened for
+ reading unless a writer is also waiting for the share,
+ 2) no writer should be kept waiting for the share longer than absolutely
+ necessary.
+
+ The implementation is based on [1, secs. 4.2.2, 4.2.6, 4.2.7]
+ with a modification -- adding an additional lock (C{self.__readers_queue})
+ -- in accordance with [2].
+
+ Sources:
+ [1] A.B. Downey: "The little book of semaphores", Version 2.1.5, 2008
+ [2] P.J. Courtois, F. Heymans, D.L. Parnas:
+ "Concurrent Control with 'Readers' and 'Writers'",
+ Communications of the ACM, 1971 (via [3])
+ [3] http://en.wikipedia.org/wiki/Readers-writers_problem
+ """
+
+ def __init__(self):
+ """
+ A lock giving an even higher priority to the writer in certain
+ cases (see [2] for a discussion).
+ """
+ self.__read_switch = _LightSwitch()
+ self.__write_switch = _LightSwitch()
+ self.__no_readers = threading.Lock()
+ self.__no_writers = threading.Lock()
+ self.__readers_queue = threading.Lock()
+
+ def reader_acquire(self):
+ self.__readers_queue.acquire()
+ self.__no_readers.acquire()
+ self.__read_switch.acquire(self.__no_writers)
+ self.__no_readers.release()
+ self.__readers_queue.release()
+
+ def reader_release(self):
+ self.__read_switch.release(self.__no_writers)
+
+ def writer_acquire(self):
+ self.__write_switch.acquire(self.__no_readers)
+ self.__no_writers.acquire()
+
+ def writer_release(self):
+ self.__no_writers.release()
+ self.__write_switch.release(self.__no_readers)
+
+
+class _LightSwitch:
+ """An auxiliary "light switch"-like object. The first thread turns on the
+ "switch", the last one turns it off (see [1, sec. 4.2.2] for details)."""
+
+ def __init__(self):
+ self.__counter = 0
+ self.__mutex = threading.Lock()
+
+ def acquire(self, lock):
+ self.__mutex.acquire()
+ self.__counter += 1
+ if self.__counter == 1:
+ lock.acquire()
+ self.__mutex.release()
+
+ def release(self, lock):
+ self.__mutex.acquire()
+ self.__counter -= 1
+ if self.__counter == 0:
+ lock.release()
+ self.__mutex.release()
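A hedged usage sketch of the writer-priority lock above; the shared dictionary and the two helper functions are illustrative only:

from ecdsa._rwlock import RWLock

lock = RWLock()
shared = {"value": 0}

def read_value():
    lock.reader_acquire()        # many readers may hold the lock at once
    try:
        return shared["value"]
    finally:
        lock.reader_release()

def bump_value():
    lock.writer_acquire()        # writers get exclusive access
    try:
        shared["value"] += 1
    finally:
        lock.writer_release()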
diff --git a/frozen_deps/ecdsa/_version.py b/frozen_deps/ecdsa/_version.py
new file mode 100644
index 0000000..9a3e5fa
--- /dev/null
+++ b/frozen_deps/ecdsa/_version.py
@@ -0,0 +1,21 @@
+
+# This file was generated by 'versioneer.py' (0.17) from
+# revision-control system data, or from the parent directory name of an
+# unpacked source archive. Distribution tarballs contain a pre-generated copy
+# of this file.
+
+import json
+
+version_json = '''
+{
+ "date": "2020-11-12T20:12:49+0100",
+ "dirty": false,
+ "error": null,
+ "full-revisionid": "9d5a727c766773b8367f6dd0a49bbda21c18dbe7",
+ "version": "0.16.1"
+}
+''' # END VERSION_JSON
+
+
+def get_versions():
+ return json.loads(version_json)
diff --git a/frozen_deps/ecdsa/curves.py b/frozen_deps/ecdsa/curves.py
new file mode 100644
index 0000000..9a10380
--- /dev/null
+++ b/frozen_deps/ecdsa/curves.py
@@ -0,0 +1,194 @@
+from __future__ import division
+
+from . import der, ecdsa
+from .util import orderlen
+
+
+# orderlen was defined in this module previously, so keep it in __all__,
+# will need to mark it as deprecated later
+__all__ = [
+ "UnknownCurveError",
+ "orderlen",
+ "Curve",
+ "NIST192p",
+ "NIST224p",
+ "NIST256p",
+ "NIST384p",
+ "NIST521p",
+ "curves",
+ "find_curve",
+ "SECP256k1",
+ "BRAINPOOLP160r1",
+ "BRAINPOOLP192r1",
+ "BRAINPOOLP224r1",
+ "BRAINPOOLP256r1",
+ "BRAINPOOLP320r1",
+ "BRAINPOOLP384r1",
+ "BRAINPOOLP512r1",
+]
+
+
+class UnknownCurveError(Exception):
+ pass
+
+
+class Curve:
+ def __init__(self, name, curve, generator, oid, openssl_name=None):
+ self.name = name
+ self.openssl_name = openssl_name # maybe None
+ self.curve = curve
+ self.generator = generator
+ self.order = generator.order()
+ self.baselen = orderlen(self.order)
+ self.verifying_key_length = 2 * self.baselen
+ self.signature_length = 2 * self.baselen
+ self.oid = oid
+ self.encoded_oid = der.encode_oid(*oid)
+
+ def __repr__(self):
+ return self.name
+
+
+# the NIST curves
+NIST192p = Curve(
+ "NIST192p",
+ ecdsa.curve_192,
+ ecdsa.generator_192,
+ (1, 2, 840, 10045, 3, 1, 1),
+ "prime192v1",
+)
+
+
+NIST224p = Curve(
+ "NIST224p",
+ ecdsa.curve_224,
+ ecdsa.generator_224,
+ (1, 3, 132, 0, 33),
+ "secp224r1",
+)
+
+
+NIST256p = Curve(
+ "NIST256p",
+ ecdsa.curve_256,
+ ecdsa.generator_256,
+ (1, 2, 840, 10045, 3, 1, 7),
+ "prime256v1",
+)
+
+
+NIST384p = Curve(
+ "NIST384p",
+ ecdsa.curve_384,
+ ecdsa.generator_384,
+ (1, 3, 132, 0, 34),
+ "secp384r1",
+)
+
+
+NIST521p = Curve(
+ "NIST521p",
+ ecdsa.curve_521,
+ ecdsa.generator_521,
+ (1, 3, 132, 0, 35),
+ "secp521r1",
+)
+
+
+SECP256k1 = Curve(
+ "SECP256k1",
+ ecdsa.curve_secp256k1,
+ ecdsa.generator_secp256k1,
+ (1, 3, 132, 0, 10),
+ "secp256k1",
+)
+
+
+BRAINPOOLP160r1 = Curve(
+ "BRAINPOOLP160r1",
+ ecdsa.curve_brainpoolp160r1,
+ ecdsa.generator_brainpoolp160r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 1),
+ "brainpoolP160r1",
+)
+
+
+BRAINPOOLP192r1 = Curve(
+ "BRAINPOOLP192r1",
+ ecdsa.curve_brainpoolp192r1,
+ ecdsa.generator_brainpoolp192r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 3),
+ "brainpoolP192r1",
+)
+
+
+BRAINPOOLP224r1 = Curve(
+ "BRAINPOOLP224r1",
+ ecdsa.curve_brainpoolp224r1,
+ ecdsa.generator_brainpoolp224r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 5),
+ "brainpoolP224r1",
+)
+
+
+BRAINPOOLP256r1 = Curve(
+ "BRAINPOOLP256r1",
+ ecdsa.curve_brainpoolp256r1,
+ ecdsa.generator_brainpoolp256r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 7),
+ "brainpoolP256r1",
+)
+
+
+BRAINPOOLP320r1 = Curve(
+ "BRAINPOOLP320r1",
+ ecdsa.curve_brainpoolp320r1,
+ ecdsa.generator_brainpoolp320r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 9),
+ "brainpoolP320r1",
+)
+
+
+BRAINPOOLP384r1 = Curve(
+ "BRAINPOOLP384r1",
+ ecdsa.curve_brainpoolp384r1,
+ ecdsa.generator_brainpoolp384r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 11),
+ "brainpoolP384r1",
+)
+
+
+BRAINPOOLP512r1 = Curve(
+ "BRAINPOOLP512r1",
+ ecdsa.curve_brainpoolp512r1,
+ ecdsa.generator_brainpoolp512r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 13),
+ "brainpoolP512r1",
+)
+
+
+curves = [
+ NIST192p,
+ NIST224p,
+ NIST256p,
+ NIST384p,
+ NIST521p,
+ SECP256k1,
+ BRAINPOOLP160r1,
+ BRAINPOOLP192r1,
+ BRAINPOOLP224r1,
+ BRAINPOOLP256r1,
+ BRAINPOOLP320r1,
+ BRAINPOOLP384r1,
+ BRAINPOOLP512r1,
+]
+
+
+def find_curve(oid_curve):
+ for c in curves:
+ if c.oid == oid_curve:
+ return c
+ raise UnknownCurveError(
+ "I don't know about the curve with oid %s."
+ "I only know about these: %s" % (oid_curve, [c.name for c in curves])
+ )
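For illustration, find_curve maps a DER object identifier back to one of the Curve instances defined above; the OID used here is the prime256v1 value from this file:

from ecdsa.curves import find_curve, NIST256p, UnknownCurveError

curve = find_curve((1, 2, 840, 10045, 3, 1, 7))
assert curve is NIST256p
assert curve.openssl_name == "prime256v1"
assert curve.baselen == 32          # 256-bit order -> 32-byte scalars

try:
    find_curve((1, 2, 3))           # unknown OID raises UnknownCurveError
except UnknownCurveError:
    pass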
diff --git a/frozen_deps/ecdsa/der.py b/frozen_deps/ecdsa/der.py
new file mode 100644
index 0000000..8c1de9b
--- /dev/null
+++ b/frozen_deps/ecdsa/der.py
@@ -0,0 +1,409 @@
+from __future__ import division
+
+import binascii
+import base64
+import warnings
+from itertools import chain
+from six import int2byte, b, text_type
+from ._compat import str_idx_as_int
+
+
+class UnexpectedDER(Exception):
+ pass
+
+
+def encode_constructed(tag, value):
+ return int2byte(0xA0 + tag) + encode_length(len(value)) + value
+
+
+def encode_integer(r):
+ assert r >= 0 # can't support negative numbers yet
+ h = ("%x" % r).encode()
+ if len(h) % 2:
+ h = b("0") + h
+ s = binascii.unhexlify(h)
+ num = str_idx_as_int(s, 0)
+ if num <= 0x7F:
+ return b("\x02") + encode_length(len(s)) + s
+ else:
+ # DER integers are two's complement, so if the first byte is
+ # 0x80-0xff then we need an extra 0x00 byte to prevent it from
+ # looking negative.
+ return b("\x02") + encode_length(len(s) + 1) + b("\x00") + s
+
+
+# sentry object to check if an argument was specified (used to detect
+# deprecated calling convention)
+_sentry = object()
+
+
+def encode_bitstring(s, unused=_sentry):
+ """
+ Encode a binary string as a BIT STRING using :term:`DER` encoding.
+
+ Note, because there is no native Python object that can encode an actual
+ bit string, this function only accepts byte strings as the `s` argument.
+ The byte string is the actual bit string that will be encoded, padded
+ on the right (least significant bits, looking from big endian perspective)
+ to the first full byte. If the bit string has a bit length that is a
+ multiple of 8, then the padding should not be included. For correct DER
+ encoding the padding bits MUST be set to 0.
+
+ The number of padding bits needs to be provided as the `unused` parameter.
+ If it is specified as None, it means the number of unused bits
+ is already encoded in the string as the first byte.
+
+ The deprecated call convention specifies just the `s` parameter and
+ expects the number of unused bits to be encoded as the first byte of `s`
+ (same convention as with None).
+
+ Empty string must be encoded with `unused` specified as 0.
+
+ Future version of python-ecdsa will make specifying the `unused` argument
+ mandatory.
+
+ :param s: bytes to encode
+ :type s: bytes like object
+ :param unused: number of bits at the end of `s` that are unused, must be
+ between 0 and 7 (inclusive)
+ :type unused: int or None
+
+ :raises ValueError: when `unused` is too large or too small
+
+ :return: `s` encoded using DER
+ :rtype: bytes
+ """
+ encoded_unused = b""
+ len_extra = 0
+ if unused is _sentry:
+ warnings.warn(
+ "Legacy call convention used, unused= needs to be specified",
+ DeprecationWarning,
+ )
+ elif unused is not None:
+ if not 0 <= unused <= 7:
+ raise ValueError("unused must be integer between 0 and 7")
+ if unused:
+ if not s:
+ raise ValueError("unused is non-zero but s is empty")
+ last = str_idx_as_int(s, -1)
+ if last & (2 ** unused - 1):
+ raise ValueError("unused bits must be zeros in DER")
+ encoded_unused = int2byte(unused)
+ len_extra = 1
+ return b("\x03") + encode_length(len(s) + len_extra) + encoded_unused + s
+
+
+def encode_octet_string(s):
+ return b("\x04") + encode_length(len(s)) + s
+
+
+def encode_oid(first, second, *pieces):
+ assert 0 <= first < 2 and 0 <= second <= 39 or first == 2 and 0 <= second
+ body = b"".join(
+ chain(
+ [encode_number(40 * first + second)],
+ (encode_number(p) for p in pieces),
+ )
+ )
+ return b"\x06" + encode_length(len(body)) + body
+
+
+def encode_sequence(*encoded_pieces):
+ total_len = sum([len(p) for p in encoded_pieces])
+ return b("\x30") + encode_length(total_len) + b("").join(encoded_pieces)
+
+
+def encode_number(n):
+ b128_digits = []
+ while n:
+ b128_digits.insert(0, (n & 0x7F) | 0x80)
+ n = n >> 7
+ if not b128_digits:
+ b128_digits.append(0)
+ b128_digits[-1] &= 0x7F
+ return b("").join([int2byte(d) for d in b128_digits])
+
+
+def is_sequence(string):
+ return string and string[:1] == b"\x30"
+
+
+def remove_constructed(string):
+ s0 = str_idx_as_int(string, 0)
+ if (s0 & 0xE0) != 0xA0:
+ raise UnexpectedDER(
+ "wanted type 'constructed tag' (0xa0-0xbf), got 0x%02x" % s0
+ )
+ tag = s0 & 0x1F
+ length, llen = read_length(string[1:])
+ body = string[1 + llen : 1 + llen + length]
+ rest = string[1 + llen + length :]
+ return tag, body, rest
+
+
+def remove_sequence(string):
+ if not string:
+ raise UnexpectedDER("Empty string does not encode a sequence")
+ if string[:1] != b"\x30":
+ n = str_idx_as_int(string, 0)
+ raise UnexpectedDER("wanted type 'sequence' (0x30), got 0x%02x" % n)
+ length, lengthlength = read_length(string[1:])
+ if length > len(string) - 1 - lengthlength:
+ raise UnexpectedDER("Length longer than the provided buffer")
+ endseq = 1 + lengthlength + length
+ return string[1 + lengthlength : endseq], string[endseq:]
+
+
+def remove_octet_string(string):
+ if string[:1] != b"\x04":
+ n = str_idx_as_int(string, 0)
+ raise UnexpectedDER("wanted type 'octetstring' (0x04), got 0x%02x" % n)
+ length, llen = read_length(string[1:])
+ body = string[1 + llen : 1 + llen + length]
+ rest = string[1 + llen + length :]
+ return body, rest
+
+
+def remove_object(string):
+ if not string:
+ raise UnexpectedDER(
+ "Empty string does not encode an object identifier"
+ )
+ if string[:1] != b"\x06":
+ n = str_idx_as_int(string, 0)
+ raise UnexpectedDER("wanted type 'object' (0x06), got 0x%02x" % n)
+ length, lengthlength = read_length(string[1:])
+ body = string[1 + lengthlength : 1 + lengthlength + length]
+ rest = string[1 + lengthlength + length :]
+ if not body:
+ raise UnexpectedDER("Empty object identifier")
+ if len(body) != length:
+ raise UnexpectedDER(
+ "Length of object identifier longer than the provided buffer"
+ )
+ numbers = []
+ while body:
+ n, ll = read_number(body)
+ numbers.append(n)
+ body = body[ll:]
+ n0 = numbers.pop(0)
+ if n0 < 80:
+ first = n0 // 40
+ else:
+ first = 2
+ second = n0 - (40 * first)
+ numbers.insert(0, first)
+ numbers.insert(1, second)
+ return tuple(numbers), rest
+
+
+def remove_integer(string):
+ if not string:
+ raise UnexpectedDER(
+ "Empty string is an invalid encoding of an integer"
+ )
+ if string[:1] != b"\x02":
+ n = str_idx_as_int(string, 0)
+ raise UnexpectedDER("wanted type 'integer' (0x02), got 0x%02x" % n)
+ length, llen = read_length(string[1:])
+ if length > len(string) - 1 - llen:
+ raise UnexpectedDER("Length longer than provided buffer")
+ if length == 0:
+ raise UnexpectedDER("0-byte long encoding of integer")
+ numberbytes = string[1 + llen : 1 + llen + length]
+ rest = string[1 + llen + length :]
+ msb = str_idx_as_int(numberbytes, 0)
+ if not msb < 0x80:
+ raise UnexpectedDER("Negative integers are not supported")
+ # check if the encoding is the minimal one (DER requirement)
+ if length > 1 and not msb:
+ # leading zero byte is allowed if the integer would have been
+ # considered a negative number otherwise
+ smsb = str_idx_as_int(numberbytes, 1)
+ if smsb < 0x80:
+ raise UnexpectedDER(
+ "Invalid encoding of integer, unnecessary "
+ "zero padding bytes"
+ )
+ return int(binascii.hexlify(numberbytes), 16), rest
+
+
+def read_number(string):
+ number = 0
+ llen = 0
+ if str_idx_as_int(string, 0) == 0x80:
+ raise UnexpectedDER("Non minimal encoding of OID subidentifier")
+ # base-128 big endian, with most significant bit set in all but the last
+ # byte
+ while True:
+ if llen >= len(string):
+ raise UnexpectedDER("ran out of length bytes")
+ number = number << 7
+ d = str_idx_as_int(string, llen)
+ number += d & 0x7F
+ llen += 1
+ if not d & 0x80:
+ break
+ return number, llen
+
+
+def encode_length(l):
+ assert l >= 0
+ if l < 0x80:
+ return int2byte(l)
+ s = ("%x" % l).encode()
+ if len(s) % 2:
+ s = b("0") + s
+ s = binascii.unhexlify(s)
+ llen = len(s)
+ return int2byte(0x80 | llen) + s
+
+
+def read_length(string):
+ if not string:
+ raise UnexpectedDER("Empty string can't encode valid length value")
+ num = str_idx_as_int(string, 0)
+ if not (num & 0x80):
+ # short form
+ return (num & 0x7F), 1
+ # else long-form: b0&0x7f is number of additional base256 length bytes,
+ # big-endian
+ llen = num & 0x7F
+ if not llen:
+ raise UnexpectedDER("Invalid length encoding, length of length is 0")
+ if llen > len(string) - 1:
+ raise UnexpectedDER("Length of length longer than provided buffer")
+ # verify that the encoding is minimal possible (DER requirement)
+ msb = str_idx_as_int(string, 1)
+ if not msb or llen == 1 and msb < 0x80:
+ raise UnexpectedDER("Not minimal encoding of length")
+ return int(binascii.hexlify(string[1 : 1 + llen]), 16), 1 + llen
+
+
+def remove_bitstring(string, expect_unused=_sentry):
+ """
+ Remove a BIT STRING object from `string` following :term:`DER`.
+
+ The `expect_unused` can be used to specify if the bit string should
+ have the amount of unused bits decoded or not. If it's an integer, any
+ read BIT STRING that has number of unused bits different from specified
+ value will cause UnexpectedDER exception to be raised (this is especially
+ useful when decoding BIT STRINGS that have DER encoded object in them;
+ DER encoding is byte oriented, so the unused bits will always equal 0).
+
+ If the `expect_unused` is specified as None, the first element returned
+ will be a tuple, with the first value being the extracted bit string
+ while the second value will be the decoded number of unused bits.
+
+ If the `expect_unused` is unspecified, the decoding of byte with
+ number of unused bits will not be attempted and the bit string will be
+ returned as-is, the caller will be required to decode it and verify its
+ correctness.
+
+ Future versions of python-ecdsa will require the `expect_unused` parameter
+ to be specified.
+
+ :param string: string of bytes to extract the BIT STRING from
+ :type string: bytes like object
+ :param expect_unused: number of bits that should be unused in the BIT
+ STRING, or None, to return it to caller
+ :type expect_unused: int or None
+
+ :raises UnexpectedDER: when the encoding does not follow DER.
+
+ :return: a tuple with first element being the extracted bit string and
+ the second being the remaining bytes in the string (if any); if the
+ `expect_unused` is specified as None, the first element of the returned
+ tuple will be a tuple itself, with first element being the bit string
+ as bytes and the second element being the number of unused bits at the
+ end of the byte array as an integer
+ :rtype: tuple
+ """
+ if not string:
+ raise UnexpectedDER("Empty string does not encode a bitstring")
+ if expect_unused is _sentry:
+ warnings.warn(
+ "Legacy call convention used, expect_unused= needs to be"
+ " specified",
+ DeprecationWarning,
+ )
+ num = str_idx_as_int(string, 0)
+ if string[:1] != b"\x03":
+ raise UnexpectedDER("wanted bitstring (0x03), got 0x%02x" % num)
+ length, llen = read_length(string[1:])
+ if not length:
+ raise UnexpectedDER("Invalid length of bit string, can't be 0")
+ body = string[1 + llen : 1 + llen + length]
+ rest = string[1 + llen + length :]
+ if expect_unused is not _sentry:
+ unused = str_idx_as_int(body, 0)
+ if not 0 <= unused <= 7:
+ raise UnexpectedDER("Invalid encoding of unused bits")
+ if expect_unused is not None and expect_unused != unused:
+ raise UnexpectedDER("Unexpected number of unused bits")
+ body = body[1:]
+ if unused:
+ if not body:
+ raise UnexpectedDER("Invalid encoding of empty bit string")
+ last = str_idx_as_int(body, -1)
+ # verify that all the unused bits are set to zero (DER requirement)
+ if last & (2 ** unused - 1):
+ raise UnexpectedDER("Non zero padding bits in bit string")
+ if expect_unused is None:
+ body = (body, unused)
+ return body, rest
+
+
+# SEQUENCE([1, STRING(secexp), cont[0], OBJECT(curvename), cont[1], BITSTRING])
+
+
+# signatures: (from RFC3279)
+# ansi-X9-62 OBJECT IDENTIFIER ::= {
+# iso(1) member-body(2) us(840) 10045 }
+#
+# id-ecSigType OBJECT IDENTIFIER ::= {
+# ansi-X9-62 signatures(4) }
+# ecdsa-with-SHA1 OBJECT IDENTIFIER ::= {
+# id-ecSigType 1 }
+# so 1,2,840,10045,4,1
+# so 0x42, .. ..
+
+# Ecdsa-Sig-Value ::= SEQUENCE {
+# r INTEGER,
+# s INTEGER }
+
+# id-public-key-type OBJECT IDENTIFIER ::= { ansi-X9.62 2 }
+#
+# id-ecPublicKey OBJECT IDENTIFIER ::= { id-publicKeyType 1 }
+
+# I think the secp224r1 identifier is (t=06,l=05,v=2b81040021)
+# secp224r1 OBJECT IDENTIFIER ::= {
+# iso(1) identified-organization(3) certicom(132) curve(0) 33 }
+# and the secp384r1 is (t=06,l=05,v=2b81040022)
+# secp384r1 OBJECT IDENTIFIER ::= {
+# iso(1) identified-organization(3) certicom(132) curve(0) 34 }
+
+
+def unpem(pem):
+ if isinstance(pem, text_type):
+ pem = pem.encode()
+
+ d = b("").join(
+ [
+ l.strip()
+ for l in pem.split(b("\n"))
+ if l and not l.startswith(b("-----"))
+ ]
+ )
+ return base64.b64decode(d)
+
+
+def topem(der, name):
+ b64 = base64.b64encode(der)
+ lines = [("-----BEGIN %s-----\n" % name).encode()]
+ lines.extend(
+ [b64[start : start + 64] + b("\n") for start in range(0, len(b64), 64)]
+ )
+ lines.append(("-----END %s-----\n" % name).encode())
+ return b("").join(lines)
diff --git a/frozen_deps/ecdsa/ecdh.py b/frozen_deps/ecdsa/ecdh.py
new file mode 100644
index 0000000..9173279
--- /dev/null
+++ b/frozen_deps/ecdsa/ecdh.py
@@ -0,0 +1,327 @@
+"""
+Class for performing Elliptic-curve Diffie-Hellman (ECDH) operations.
+"""
+
+from .util import number_to_string
+from .ellipticcurve import INFINITY
+from .keys import SigningKey, VerifyingKey
+
+
+__all__ = [
+ "ECDH",
+ "NoKeyError",
+ "NoCurveError",
+ "InvalidCurveError",
+ "InvalidSharedSecretError",
+]
+
+
+class NoKeyError(Exception):
+ """ECDH. Key not found but it is needed for operation."""
+
+ pass
+
+
+class NoCurveError(Exception):
+ """ECDH. Curve not set but it is needed for operation."""
+
+ pass
+
+
+class InvalidCurveError(Exception):
+ """
+ ECDH. Raised in case the public and private keys use different curves.
+ """
+
+ pass
+
+
+class InvalidSharedSecretError(Exception):
+ """ECDH. Raised in case the shared secret we obtained is an INFINITY."""
+
+ pass
+
+
+class ECDH(object):
+ """
+ Elliptic-curve Diffie-Hellman (ECDH). A key agreement protocol.
+
+ Allows two parties, each having an elliptic-curve public-private key
+ pair, to establish a shared secret over an insecure channel
+ """
+
+ def __init__(self, curve=None, private_key=None, public_key=None):
+ """
+ ECDH init.
+
+ ECDH can be initialised without parameters; the first operation
+ (loading either key) will then set the curve used.
+ All parameters must ultimately be set before shared secret
+ calculation is allowed.
+
+ :param curve: curve for operations
+ :type curve: Curve
+ :param private_key: `my` private key for ECDH
+ :type private_key: SigningKey
+ :param public_key: `their` public key for ECDH
+ :type public_key: VerifyingKey
+ """
+ self.curve = curve
+ self.private_key = None
+ self.public_key = None
+ if private_key:
+ self.load_private_key(private_key)
+ if public_key:
+ self.load_received_public_key(public_key)
+
+ def _get_shared_secret(self, remote_public_key):
+ if not self.private_key:
+ raise NoKeyError(
+ "Private key needs to be set to create shared secret"
+ )
+ if not self.public_key:
+ raise NoKeyError(
+ "Public key needs to be set to create shared secret"
+ )
+ if not (
+ self.private_key.curve == self.curve == remote_public_key.curve
+ ):
+ raise InvalidCurveError(
+ "Curves for public key and private key is not equal."
+ )
+
+ # shared secret = PUBKEYtheirs * PRIVATEKEYours
+ result = (
+ remote_public_key.pubkey.point
+ * self.private_key.privkey.secret_multiplier
+ )
+ if result == INFINITY:
+ raise InvalidSharedSecretError("Invalid shared secret (INFINITY).")
+
+ return result.x()
+
+ def set_curve(self, key_curve):
+ """
+ Set the working curve for ecdh operations.
+
+ :param key_curve: curve from `curves` module
+ :type key_curve: Curve
+ """
+ self.curve = key_curve
+
+ def generate_private_key(self):
+ """
+ Generate local private key for ecdh operation with curve that was set.
+
+ :raises NoCurveError: Curve must be set before key generation.
+
+ :return: public (verifying) key from this private key.
+ :rtype: VerifyingKey object
+ """
+ if not self.curve:
+ raise NoCurveError("Curve must be set prior to key generation.")
+ return self.load_private_key(SigningKey.generate(curve=self.curve))
+
+ def load_private_key(self, private_key):
+ """
+ Load private key from SigningKey (keys.py) object.
+
+ Needs to have the same curve as was set with the set_curve method.
+ If the curve is not set, it is set from this SigningKey.
+
+ :param private_key: Initialised SigningKey class
+ :type private_key: SigningKey
+
+ :raises InvalidCurveError: private_key curve not the same as self.curve
+
+ :return: public (verifying) key from this private key.
+ :rtype: VerifyingKey object
+ """
+ if not self.curve:
+ self.curve = private_key.curve
+ if self.curve != private_key.curve:
+ raise InvalidCurveError("Curve mismatch.")
+ self.private_key = private_key
+ return self.private_key.get_verifying_key()
+
+ def load_private_key_bytes(self, private_key):
+ """
+ Load private key from byte string.
+
+ Uses current curve and checks if the provided key matches
+ the curve of ECDH key agreement.
+ The key is loaded via the from_string method of the SigningKey class.
+
+ :param private_key: private key in bytes string format
+ :type private_key: :term:`bytes-like object`
+
+ :raises NoCurveError: Curve must be set before loading.
+
+ :return: public (verifying) key from this private key.
+ :rtype: VerifyingKey object
+ """
+ if not self.curve:
+ raise NoCurveError("Curve must be set prior to key load.")
+ return self.load_private_key(
+ SigningKey.from_string(private_key, curve=self.curve)
+ )
+
+ def load_private_key_der(self, private_key_der):
+ """
+ Load private key from DER byte string.
+
+ Compares the curve of the DER-encoded key with the ECDH set curve,
+ uses the former if unset.
+
+ Note, the only DER format supported is RFC5915.
+ See keys.py:SigningKey.from_der()
+
+ :param private_key_der: string with the DER encoding of private ECDSA
+ key
+ :type private_key_der: string
+
+ :raises InvalidCurveError: private_key curve not the same as self.curve
+
+ :return: public (verifying) key from this private key.
+ :rtype: VerifyingKey object
+ """
+ return self.load_private_key(SigningKey.from_der(private_key_der))
+
+ def load_private_key_pem(self, private_key_pem):
+ """
+ Load private key from PEM string.
+
+ Compares the curve of the DER-encoded key with the ECDH set curve,
+ uses the former if unset.
+
+ Note, the only PEM format supported is RFC5915;
+ it needs to have an `EC PRIVATE KEY` section.
+ See keys.py:SigningKey.from_pem()
+
+ :param private_key_pem: string with PEM-encoded private ECDSA key
+ :type private_key_pem: string
+
+ :raises InvalidCurveError: private_key curve not the same as self.curve
+
+ :return: public (verifying) key from this private key.
+ :rtype: VerifyingKey object
+ """
+ return self.load_private_key(SigningKey.from_pem(private_key_pem))
+
+ def get_public_key(self):
+ """
+ Provides a public key that matches the local private key.
+
+ Needs to be sent to the remote party.
+
+ :return: public (verifying) key from local private key.
+ :rtype: VerifyingKey object
+ """
+ return self.private_key.get_verifying_key()
+
+ def load_received_public_key(self, public_key):
+ """
+ Load public key from VerifyingKey (keys.py) object.
+
+ Needs to have the same curve as is currently set for the ECDH operation.
+ If the curve is not set, it is set from the VerifyingKey.
+
+ :param public_key: Initialised VerifyingKey class
+ :type public_key: VerifyingKey
+
+ :raises InvalidCurveError: public_key curve not the same as self.curve
+ """
+ if not self.curve:
+ self.curve = public_key.curve
+ if self.curve != public_key.curve:
+ raise InvalidCurveError("Curve mismatch.")
+ self.public_key = public_key
+
+ def load_received_public_key_bytes(self, public_key_str):
+ """
+ Load public key from byte string.
+
+ Uses current curve and checks if key length corresponds to
+ the current curve.
+ The key is loaded via the from_string method of the VerifyingKey class.
+
+ :param public_key_str: public key in bytes string format
+ :type public_key_str: :term:`bytes-like object`
+ """
+ return self.load_received_public_key(
+ VerifyingKey.from_string(public_key_str, self.curve)
+ )
+
+ def load_received_public_key_der(self, public_key_der):
+ """
+ Load public key from DER byte string.
+
+ Compares the curve of the DER-encoded key with the ECDH set curve,
+ uses the former if unset.
+
+ Note, the only DER format supported is RFC5912.
+ See keys.py:VerifyingKey.from_der()
+
+ :param public_key_der: string with the DER encoding of public ECDSA key
+ :type public_key_der: string
+
+ :raises InvalidCurveError: public_key curve not the same as self.curve
+ """
+ return self.load_received_public_key(
+ VerifyingKey.from_der(public_key_der)
+ )
+
+ def load_received_public_key_pem(self, public_key_pem):
+ """
+ Load public key from PEM string.
+
+ Compares the curve of the PEM-encoded key with the ECDH set curve,
+ uses the former if unset.
+
+ Note, the only PEM format supported is RFC5912.
+ See keys.py:VerifyingKey.from_pem()
+
+ :param public_key_pem: string with PEM-encoded public ECDSA key
+ :type public_key_pem: string
+
+ :raises InvalidCurveError: public_key curve not the same as self.curve
+ """
+ return self.load_received_public_key(
+ VerifyingKey.from_pem(public_key_pem)
+ )
+
+ def generate_sharedsecret_bytes(self):
+ """
+ Generate shared secret from local private key and remote public key.
+
+ The object needs to have both the private key and the received
+ public key before generation is allowed.
+
+ :raises InvalidCurveError: public_key curve not the same as self.curve
+ :raises NoKeyError: public_key or private_key is not set
+
+ :return: shared secret
+ :rtype: byte string
+ """
+ return number_to_string(
+ self.generate_sharedsecret(), self.private_key.curve.order
+ )
+
+ def generate_sharedsecret(self):
+ """
+ Generate shared secret from local private key and remote public key.
+
+ The object needs to have both the private key and the received
+ public key before generation is allowed.
+
+ The result is the same for the local and the remote party:
+ shared secret(local private key, remote public key) ==
+ shared secret(local public key, remote private key)
+
+ :raises InvalidCurveError: public_key curve not the same as self.curve
+ :raises NoKeyError: public_key or private_key is not set
+
+ :return: shared secret
+ :rtype: int
+ """
+ return self._get_shared_secret(self.public_key)
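A sketch of the two-party key agreement the class above implements; both ECDH objects here are local stand-ins for the two peers:

from ecdsa import ECDH, NIST256p

alice = ECDH(curve=NIST256p)
bob = ECDH(curve=NIST256p)

# generate_private_key() returns the VerifyingKey to send to the peer
alice_pub = alice.generate_private_key()
bob_pub = bob.generate_private_key()

alice.load_received_public_key(bob_pub)
bob.load_received_public_key(alice_pub)

# both sides derive the same shared secret
assert alice.generate_sharedsecret_bytes() == bob.generate_sharedsecret_bytes()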
diff --git a/frozen_deps/ecdsa/ecdsa.py b/frozen_deps/ecdsa/ecdsa.py
new file mode 100644
index 0000000..d785a45
--- /dev/null
+++ b/frozen_deps/ecdsa/ecdsa.py
@@ -0,0 +1,754 @@
+#! /usr/bin/env python
+
+"""
+Implementation of Elliptic-Curve Digital Signatures.
+
+Classes and methods for elliptic-curve signatures:
+private keys, public keys, signatures,
+NIST prime-modulus curves with modulus lengths of
+192, 224, 256, 384, and 521 bits.
+
+Example:
+
+ # (In real-life applications, you would probably want to
+ # protect against defects in SystemRandom.)
+ from random import SystemRandom
+ randrange = SystemRandom().randrange
+
+ # Generate a public/private key pair using the NIST Curve P-192:
+
+ g = generator_192
+ n = g.order()
+ secret = randrange( 1, n )
+ pubkey = Public_key( g, g * secret )
+ privkey = Private_key( pubkey, secret )
+
+ # Signing a hash value:
+
+ hash = randrange( 1, n )
+ signature = privkey.sign( hash, randrange( 1, n ) )
+
+ # Verifying a signature for a hash value:
+
+ if pubkey.verifies( hash, signature ):
+ print_("Demo verification succeeded.")
+ else:
+ print_("*** Demo verification failed.")
+
+ # Verification fails if the hash value is modified:
+
+ if pubkey.verifies( hash-1, signature ):
+ print_("**** Demo verification failed to reject tampered hash.")
+ else:
+ print_("Demo verification correctly rejected tampered hash.")
+
+Version of 2009.05.16.
+
+Revision history:
+ 2005.12.31 - Initial version.
+ 2008.11.25 - Substantial revisions introducing new classes.
+ 2009.05.16 - Warn against using random.randrange in real applications.
+ 2009.05.17 - Use random.SystemRandom by default.
+
+Written in 2005 by Peter Pearson and placed in the public domain.
+"""
+
+from six import int2byte, b
+from . import ellipticcurve
+from . import numbertheory
+from .util import bit_length
+from ._compat import remove_whitespace
+
+
+class RSZeroError(RuntimeError):
+ pass
+
+
+class InvalidPointError(RuntimeError):
+ pass
+
+
+class Signature(object):
+ """ECDSA signature."""
+
+ def __init__(self, r, s):
+ self.r = r
+ self.s = s
+
+ def recover_public_keys(self, hash, generator):
+ """Returns two public keys for which the signature is valid
+ hash is signed hash
+ generator is the used generator of the signature
+ """
+ curve = generator.curve()
+ n = generator.order()
+ r = self.r
+ s = self.s
+ e = hash
+ x = r
+
+ # Compute the curve point with x as x-coordinate
+ alpha = (
+ pow(x, 3, curve.p()) + (curve.a() * x) + curve.b()
+ ) % curve.p()
+ beta = numbertheory.square_root_mod_prime(alpha, curve.p())
+ y = beta if beta % 2 == 0 else curve.p() - beta
+
+ # Compute the public key
+ R1 = ellipticcurve.PointJacobi(curve, x, y, 1, n)
+ Q1 = numbertheory.inverse_mod(r, n) * (s * R1 + (-e % n) * generator)
+ Pk1 = Public_key(generator, Q1)
+
+ # And the second solution
+ R2 = ellipticcurve.PointJacobi(curve, x, -y, 1, n)
+ Q2 = numbertheory.inverse_mod(r, n) * (s * R2 + (-e % n) * generator)
+ Pk2 = Public_key(generator, Q2)
+
+ return [Pk1, Pk2]
+
+
+class Public_key(object):
+ """Public key for ECDSA."""
+
+ def __init__(self, generator, point, verify=True):
+ """Low level ECDSA public key object.
+
+ :param generator: the Point that generates the group (the base point)
+ :param point: the Point that defines the public key
+ :param bool verify: if True check if point is valid point on curve
+
+ :raises InvalidPointError: if the point parameters are invalid or
+ point does not lie on the curve
+ """
+
+ self.curve = generator.curve()
+ self.generator = generator
+ self.point = point
+ n = generator.order()
+ p = self.curve.p()
+ if not (0 <= point.x() < p) or not (0 <= point.y() < p):
+ raise InvalidPointError(
+ "The public point has x or y out of range."
+ )
+ if verify and not self.curve.contains_point(point.x(), point.y()):
+ raise InvalidPointError("Point does not lie on the curve")
+ if not n:
+ raise InvalidPointError("Generator point must have order.")
+ # for curve parameters with base point with cofactor 1, all points
+ # that are on the curve are scalar multiples of the base point, so
+ # verifying that is not necessary. See Section 3.2.2.1 of SEC 1 v2
+ if (
+ verify
+ and self.curve.cofactor() != 1
+ and not n * point == ellipticcurve.INFINITY
+ ):
+ raise InvalidPointError("Generator point order is bad.")
+
+ def __eq__(self, other):
+ if isinstance(other, Public_key):
+ """Return True if the points are identical, False otherwise."""
+ return self.curve == other.curve and self.point == other.point
+ return NotImplemented
+
+ def verifies(self, hash, signature):
+ """Verify that signature is a valid signature of hash.
+ Return True if the signature is valid.
+ """
+
+ # From X9.62 J.3.1.
+
+ G = self.generator
+ n = G.order()
+ r = signature.r
+ s = signature.s
+ if r < 1 or r > n - 1:
+ return False
+ if s < 1 or s > n - 1:
+ return False
+ c = numbertheory.inverse_mod(s, n)
+ u1 = (hash * c) % n
+ u2 = (r * c) % n
+ if hasattr(G, "mul_add"):
+ xy = G.mul_add(u1, self.point, u2)
+ else:
+ xy = u1 * G + u2 * self.point
+ v = xy.x() % n
+ return v == r
+
+
+class Private_key(object):
+ """Private key for ECDSA."""
+
+ def __init__(self, public_key, secret_multiplier):
+ """public_key is of class Public_key;
+ secret_multiplier is a large integer.
+ """
+
+ self.public_key = public_key
+ self.secret_multiplier = secret_multiplier
+
+ def __eq__(self, other):
+ if isinstance(other, Private_key):
+ """Return True if the points are identical, False otherwise."""
+ return (
+ self.public_key == other.public_key
+ and self.secret_multiplier == other.secret_multiplier
+ )
+ return NotImplemented
+
+ def sign(self, hash, random_k):
+ """Return a signature for the provided hash, using the provided
+ random nonce. It is absolutely vital that random_k be an unpredictable
+ number in the range [1, self.public_key.point.order()-1]. If
+ an attacker can guess random_k, he can compute our private key from a
+ single signature. Also, if an attacker knows a few high-order
+ bits (or a few low-order bits) of random_k, he can compute our private
+ key from many signatures. The generation of nonces with adequate
+ cryptographic strength is very difficult and far beyond the scope
+ of this comment.
+
+ May raise RuntimeError, in which case retrying with a new
+ random value k is in order.
+ """
+
+ G = self.public_key.generator
+ n = G.order()
+ k = random_k % n
+ # Fix the bit-length of the random nonce,
+ # so that it doesn't leak via timing.
+ # This does not change that ks = k mod n
+ ks = k + n
+ kt = ks + n
+ if bit_length(ks) == bit_length(n):
+ p1 = kt * G
+ else:
+ p1 = ks * G
+ r = p1.x() % n
+ if r == 0:
+ raise RSZeroError("amazingly unlucky random number r")
+ s = (
+ numbertheory.inverse_mod(k, n)
+ * (hash + (self.secret_multiplier * r) % n)
+ ) % n
+ if s == 0:
+ raise RSZeroError("amazingly unlucky random number s")
+ return Signature(r, s)
+
+
+def int_to_string(x):
+ """Convert integer x into a string of bytes, as per X9.62."""
+ assert x >= 0
+ if x == 0:
+ return b("\0")
+ result = []
+ while x:
+ ordinal = x & 0xFF
+ result.append(int2byte(ordinal))
+ x >>= 8
+
+ result.reverse()
+ return b("").join(result)
+
+
+def string_to_int(s):
+ """Convert a string of bytes into an integer, as per X9.62."""
+ result = 0
+ for c in s:
+ if not isinstance(c, int):
+ c = ord(c)
+ result = 256 * result + c
+ return result
+
+
+def digest_integer(m):
+ """Convert an integer into a string of bytes, compute
+ its SHA-1 hash, and convert the result to an integer."""
+ #
+ # I don't expect this function to be used much. I wrote
+ # it in order to be able to duplicate the examples
+ # in ECDSAVS.
+ #
+ from hashlib import sha1
+
+ return string_to_int(sha1(int_to_string(m)).digest())
+
+
+def point_is_valid(generator, x, y):
+ """Is (x,y) a valid public key based on the specified generator?"""
+
+ # These are the tests specified in X9.62.
+
+ n = generator.order()
+ curve = generator.curve()
+ p = curve.p()
+ if not (0 <= x < p) or not (0 <= y < p):
+ return False
+ if not curve.contains_point(x, y):
+ return False
+ if (
+ curve.cofactor() != 1
+ and not n * ellipticcurve.PointJacobi(curve, x, y, 1)
+ == ellipticcurve.INFINITY
+ ):
+ return False
+ return True
+
+
+# NIST Curve P-192:
+_p = 6277101735386680763835789423207666416083908700390324961279
+_r = 6277101735386680763835789423176059013767194773182842284081
+# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
+# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65L
+_b = int(
+ remove_whitespace(
+ """
+ 64210519 E59C80E7 0FA7E9AB 72243049 FEB8DEEC C146B9B1"""
+ ),
+ 16,
+)
+_Gx = int(
+ remove_whitespace(
+ """
+ 188DA80E B03090F6 7CBF20EB 43A18800 F4FF0AFD 82FF1012"""
+ ),
+ 16,
+)
+_Gy = int(
+ remove_whitespace(
+ """
+ 07192B95 FFC8DA78 631011ED 6B24CDD5 73F977A1 1E794811"""
+ ),
+ 16,
+)
+
+curve_192 = ellipticcurve.CurveFp(_p, -3, _b, 1)
+generator_192 = ellipticcurve.PointJacobi(
+ curve_192, _Gx, _Gy, 1, _r, generator=True
+)
+
+
+# NIST Curve P-224:
+_p = int(
+ remove_whitespace(
+ """
+ 2695994666715063979466701508701963067355791626002630814351
+ 0066298881"""
+ )
+)
+_r = int(
+ remove_whitespace(
+ """
+ 2695994666715063979466701508701962594045780771442439172168
+ 2722368061"""
+ )
+)
+# s = 0xbd71344799d5c7fcdc45b59fa3b9ab8f6a948bc5L
+# c = 0x5b056c7e11dd68f40469ee7f3c7a7d74f7d121116506d031218291fbL
+_b = int(
+ remove_whitespace(
+ """
+ B4050A85 0C04B3AB F5413256 5044B0B7 D7BFD8BA 270B3943
+ 2355FFB4"""
+ ),
+ 16,
+)
+_Gx = int(
+ remove_whitespace(
+ """
+ B70E0CBD 6BB4BF7F 321390B9 4A03C1D3 56C21122 343280D6
+ 115C1D21"""
+ ),
+ 16,
+)
+_Gy = int(
+ remove_whitespace(
+ """
+ BD376388 B5F723FB 4C22DFE6 CD4375A0 5A074764 44D58199
+ 85007E34"""
+ ),
+ 16,
+)
+
+curve_224 = ellipticcurve.CurveFp(_p, -3, _b, 1)
+generator_224 = ellipticcurve.PointJacobi(
+ curve_224, _Gx, _Gy, 1, _r, generator=True
+)
+
+# NIST Curve P-256:
+_p = int(
+ remove_whitespace(
+ """
+ 1157920892103562487626974469494075735300861434152903141955
+ 33631308867097853951"""
+ )
+)
+_r = int(
+ remove_whitespace(
+ """
+ 115792089210356248762697446949407573529996955224135760342
+ 422259061068512044369"""
+ )
+)
+# s = 0xc49d360886e704936a6678e1139d26b7819f7e90L
+# c = 0x7efba1662985be9403cb055c75d4f7e0ce8d84a9c5114abcaf3177680104fa0dL
+_b = int(
+ remove_whitespace(
+ """
+ 5AC635D8 AA3A93E7 B3EBBD55 769886BC 651D06B0 CC53B0F6
+ 3BCE3C3E 27D2604B"""
+ ),
+ 16,
+)
+_Gx = int(
+ remove_whitespace(
+ """
+ 6B17D1F2 E12C4247 F8BCE6E5 63A440F2 77037D81 2DEB33A0
+ F4A13945 D898C296"""
+ ),
+ 16,
+)
+_Gy = int(
+ remove_whitespace(
+ """
+ 4FE342E2 FE1A7F9B 8EE7EB4A 7C0F9E16 2BCE3357 6B315ECE
+ CBB64068 37BF51F5"""
+ ),
+ 16,
+)
+
+curve_256 = ellipticcurve.CurveFp(_p, -3, _b, 1)
+generator_256 = ellipticcurve.PointJacobi(
+ curve_256, _Gx, _Gy, 1, _r, generator=True
+)
+
+# NIST Curve P-384:
+_p = int(
+ remove_whitespace(
+ """
+ 3940200619639447921227904010014361380507973927046544666794
+ 8293404245721771496870329047266088258938001861606973112319"""
+ )
+)
+_r = int(
+ remove_whitespace(
+ """
+ 3940200619639447921227904010014361380507973927046544666794
+ 6905279627659399113263569398956308152294913554433653942643"""
+ )
+)
+# s = 0xa335926aa319a27a1d00896a6773a4827acdac73L
+# c = int(remove_whitespace(
+# """
+# 79d1e655 f868f02f ff48dcde e14151dd b80643c1 406d0ca1
+# 0dfe6fc5 2009540a 495e8042 ea5f744f 6e184667 cc722483"""
+# ), 16)
+_b = int(
+ remove_whitespace(
+ """
+ B3312FA7 E23EE7E4 988E056B E3F82D19 181D9C6E FE814112
+ 0314088F 5013875A C656398D 8A2ED19D 2A85C8ED D3EC2AEF"""
+ ),
+ 16,
+)
+_Gx = int(
+ remove_whitespace(
+ """
+ AA87CA22 BE8B0537 8EB1C71E F320AD74 6E1D3B62 8BA79B98
+ 59F741E0 82542A38 5502F25D BF55296C 3A545E38 72760AB7"""
+ ),
+ 16,
+)
+_Gy = int(
+ remove_whitespace(
+ """
+ 3617DE4A 96262C6F 5D9E98BF 9292DC29 F8F41DBD 289A147C
+ E9DA3113 B5F0B8C0 0A60B1CE 1D7E819D 7A431D7C 90EA0E5F"""
+ ),
+ 16,
+)
+
+curve_384 = ellipticcurve.CurveFp(_p, -3, _b, 1)
+generator_384 = ellipticcurve.PointJacobi(
+ curve_384, _Gx, _Gy, 1, _r, generator=True
+)
+
+# NIST Curve P-521:
+_p = int(
+ "686479766013060971498190079908139321726943530014330540939"
+ "446345918554318339765605212255964066145455497729631139148"
+ "0858037121987999716643812574028291115057151"
+)
+_r = int(
+ "686479766013060971498190079908139321726943530014330540939"
+ "446345918554318339765539424505774633321719753296399637136"
+ "3321113864768612440380340372808892707005449"
+)
+# s = 0xd09e8800291cb85396cc6717393284aaa0da64baL
+# c = int(remove_whitespace(
+# """
+# 0b4 8bfa5f42 0a349495 39d2bdfc 264eeeeb 077688e4
+# 4fbf0ad8 f6d0edb3 7bd6b533 28100051 8e19f1b9 ffbe0fe9
+# ed8a3c22 00b8f875 e523868c 70c1e5bf 55bad637"""
+# ), 16)
+_b = int(
+ remove_whitespace(
+ """
+ 051 953EB961 8E1C9A1F 929A21A0 B68540EE A2DA725B
+ 99B315F3 B8B48991 8EF109E1 56193951 EC7E937B 1652C0BD
+ 3BB1BF07 3573DF88 3D2C34F1 EF451FD4 6B503F00"""
+ ),
+ 16,
+)
+_Gx = int(
+ remove_whitespace(
+ """
+ C6 858E06B7 0404E9CD 9E3ECB66 2395B442 9C648139
+ 053FB521 F828AF60 6B4D3DBA A14B5E77 EFE75928 FE1DC127
+ A2FFA8DE 3348B3C1 856A429B F97E7E31 C2E5BD66"""
+ ),
+ 16,
+)
+_Gy = int(
+ remove_whitespace(
+ """
+ 118 39296A78 9A3BC004 5C8A5FB4 2C7D1BD9 98F54449
+ 579B4468 17AFBD17 273E662C 97EE7299 5EF42640 C550B901
+ 3FAD0761 353C7086 A272C240 88BE9476 9FD16650"""
+ ),
+ 16,
+)
+
+curve_521 = ellipticcurve.CurveFp(_p, -3, _b, 1)
+generator_521 = ellipticcurve.PointJacobi(
+ curve_521, _Gx, _Gy, 1, _r, generator=True
+)
+
+# Certicom secp256-k1
+_a = 0x0000000000000000000000000000000000000000000000000000000000000000
+_b = 0x0000000000000000000000000000000000000000000000000000000000000007
+_p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F
+_Gx = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798
+_Gy = 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8
+_r = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141
+
+curve_secp256k1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_secp256k1 = ellipticcurve.PointJacobi(
+ curve_secp256k1, _Gx, _Gy, 1, _r, generator=True
+)
+
+# Brainpool P-160-r1
+_a = 0x340E7BE2A280EB74E2BE61BADA745D97E8F7C300
+_b = 0x1E589A8595423412134FAA2DBDEC95C8D8675E58
+_p = 0xE95E4A5F737059DC60DFC7AD95B3D8139515620F
+_Gx = 0xBED5AF16EA3F6A4F62938C4631EB5AF7BDBCDBC3
+_Gy = 0x1667CB477A1A8EC338F94741669C976316DA6321
+_q = 0xE95E4A5F737059DC60DF5991D45029409E60FC09
+
+curve_brainpoolp160r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp160r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp160r1, _Gx, _Gy, 1, _q, generator=True
+)
+
+# Brainpool P-192-r1
+_a = 0x6A91174076B1E0E19C39C031FE8685C1CAE040E5C69A28EF
+_b = 0x469A28EF7C28CCA3DC721D044F4496BCCA7EF4146FBF25C9
+_p = 0xC302F41D932A36CDA7A3463093D18DB78FCE476DE1A86297
+_Gx = 0xC0A0647EAAB6A48753B033C56CB0F0900A2F5C4853375FD6
+_Gy = 0x14B690866ABD5BB88B5F4828C1490002E6773FA2FA299B8F
+_q = 0xC302F41D932A36CDA7A3462F9E9E916B5BE8F1029AC4ACC1
+
+curve_brainpoolp192r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp192r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp192r1, _Gx, _Gy, 1, _q, generator=True
+)
+
+# Brainpool P-224-r1
+_a = 0x68A5E62CA9CE6C1C299803A6C1530B514E182AD8B0042A59CAD29F43
+_b = 0x2580F63CCFE44138870713B1A92369E33E2135D266DBB372386C400B
+_p = 0xD7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FF
+_Gx = 0x0D9029AD2C7E5CF4340823B2A87DC68C9E4CE3174C1E6EFDEE12C07D
+_Gy = 0x58AA56F772C0726F24C6B89E4ECDAC24354B9E99CAA3F6D3761402CD
+_q = 0xD7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F
+
+curve_brainpoolp224r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp224r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp224r1, _Gx, _Gy, 1, _q, generator=True
+)
+
+# Brainpool P-256-r1
+_a = 0x7D5A0975FC2C3057EEF67530417AFFE7FB8055C126DC5C6CE94A4B44F330B5D9
+_b = 0x26DC5C6CE94A4B44F330B5D9BBD77CBF958416295CF7E1CE6BCCDC18FF8C07B6
+_p = 0xA9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5377
+_Gx = 0x8BD2AEB9CB7E57CB2C4B482FFC81B7AFB9DE27E1E3BD23C23A4453BD9ACE3262
+_Gy = 0x547EF835C3DAC4FD97F8461A14611DC9C27745132DED8E545C1D54C72F046997
+_q = 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7
+
+curve_brainpoolp256r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp256r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp256r1, _Gx, _Gy, 1, _q, generator=True
+)
+
+# Brainpool P-320-r1
+_a = int(
+ remove_whitespace(
+ """
+ 3EE30B568FBAB0F883CCEBD46D3F3BB8A2A73513F5EB79DA66190EB085FFA9
+ F492F375A97D860EB4"""
+ ),
+ 16,
+)
+_b = int(
+ remove_whitespace(
+ """
+ 520883949DFDBC42D3AD198640688A6FE13F41349554B49ACC31DCCD884539
+ 816F5EB4AC8FB1F1A6"""
+ ),
+ 16,
+)
+_p = int(
+ remove_whitespace(
+ """
+ D35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC
+ 28FCD412B1F1B32E27"""
+ ),
+ 16,
+)
+_Gx = int(
+ remove_whitespace(
+ """
+ 43BD7E9AFB53D8B85289BCC48EE5BFE6F20137D10A087EB6E7871E2A10A599
+ C710AF8D0D39E20611"""
+ ),
+ 16,
+)
+_Gy = int(
+ remove_whitespace(
+ """
+ 14FDD05545EC1CC8AB4093247F77275E0743FFED117182EAA9C77877AAAC6A
+ C7D35245D1692E8EE1"""
+ ),
+ 16,
+)
+_q = int(
+ remove_whitespace(
+ """
+ D35E472036BC4FB7E13C785ED201E065F98FCFA5B68F12A32D482EC7EE8658
+ E98691555B44C59311"""
+ ),
+ 16,
+)
+
+curve_brainpoolp320r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp320r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp320r1, _Gx, _Gy, 1, _q, generator=True
+)
+
+# Brainpool P-384-r1
+_a = int(
+ remove_whitespace(
+ """
+ 7BC382C63D8C150C3C72080ACE05AFA0C2BEA28E4FB22787139165EFBA91F9
+ 0F8AA5814A503AD4EB04A8C7DD22CE2826"""
+ ),
+ 16,
+)
+_b = int(
+ remove_whitespace(
+ """
+ 04A8C7DD22CE28268B39B55416F0447C2FB77DE107DCD2A62E880EA53EEB62
+ D57CB4390295DBC9943AB78696FA504C11"""
+ ),
+ 16,
+)
+_p = int(
+ remove_whitespace(
+ """
+ 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB711
+ 23ACD3A729901D1A71874700133107EC53"""
+ ),
+ 16,
+)
+_Gx = int(
+ remove_whitespace(
+ """
+ 1D1C64F068CF45FFA2A63A81B7C13F6B8847A3E77EF14FE3DB7FCAFE0CBD10
+ E8E826E03436D646AAEF87B2E247D4AF1E"""
+ ),
+ 16,
+)
+_Gy = int(
+ remove_whitespace(
+ """
+ 8ABE1D7520F9C2A45CB1EB8E95CFD55262B70B29FEEC5864E19C054FF991292
+ 80E4646217791811142820341263C5315"""
+ ),
+ 16,
+)
+_q = int(
+ remove_whitespace(
+ """
+ 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425
+ A7CF3AB6AF6B7FC3103B883202E9046565"""
+ ),
+ 16,
+)
+
+curve_brainpoolp384r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp384r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp384r1, _Gx, _Gy, 1, _q, generator=True
+)
+
+# Brainpool P-512-r1
+_a = int(
+ remove_whitespace(
+ """
+ 7830A3318B603B89E2327145AC234CC594CBDD8D3DF91610A83441CAEA9863
+ BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117A72BF2C7B9E7C1AC4D77FC94CA"""
+ ),
+ 16,
+)
+_b = int(
+ remove_whitespace(
+ """
+ 3DF91610A83441CAEA9863BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117
+ A72BF2C7B9E7C1AC4D77FC94CADC083E67984050B75EBAE5DD2809BD638016F723"""
+ ),
+ 16,
+)
+_p = int(
+ remove_whitespace(
+ """
+ AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308
+ 717D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F3"""
+ ),
+ 16,
+)
+_Gx = int(
+ remove_whitespace(
+ """
+ 81AEE4BDD82ED9645A21322E9C4C6A9385ED9F70B5D916C1B43B62EEF4D009
+ 8EFF3B1F78E2D0D48D50D1687B93B97D5F7C6D5047406A5E688B352209BCB9F822"""
+ ),
+ 16,
+)
+_Gy = int(
+ remove_whitespace(
+ """
+ 7DDE385D566332ECC0EABFA9CF7822FDF209F70024A57B1AA000C55B881F81
+ 11B2DCDE494A5F485E5BCA4BD88A2763AED1CA2B2FA8F0540678CD1E0F3AD80892"""
+ ),
+ 16,
+)
+_q = int(
+ remove_whitespace(
+ """
+ AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308
+ 70553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069"""
+ ),
+ 16,
+)
+
+curve_brainpoolp512r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp512r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp512r1, _Gx, _Gy, 1, _q, generator=True
+)
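The low-level API above can be exercised directly, mirroring the example in the module docstring but on P-256; a sketch only, with SystemRandom standing in for a proper nonce source:

from random import SystemRandom
from ecdsa.ecdsa import Public_key, Private_key, generator_256

randrange = SystemRandom().randrange

g = generator_256
n = g.order()
secret = randrange(1, n)
pubkey = Public_key(g, g * secret)
privkey = Private_key(pubkey, secret)

h = randrange(1, n)                     # stand-in for a hashed message
signature = privkey.sign(h, randrange(1, n))
assert pubkey.verifies(h, signature)
assert not pubkey.verifies(h - 1, signature)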
diff --git a/frozen_deps/ecdsa/ellipticcurve.py b/frozen_deps/ecdsa/ellipticcurve.py
new file mode 100644
index 0000000..25565df
--- /dev/null
+++ b/frozen_deps/ecdsa/ellipticcurve.py
@@ -0,0 +1,847 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Implementation of elliptic curves, for cryptographic applications.
+#
+# This module doesn't provide any way to choose a random elliptic
+# curve, nor to verify that an elliptic curve was chosen randomly,
+# because one can simply use NIST's standard curves.
+#
+# Notes from X9.62-1998 (draft):
+# Nomenclature:
+# - Q is a public key.
+# The "Elliptic Curve Domain Parameters" include:
+# - q is the "field size", which in our case equals p.
+# - p is a big prime.
+# - G is a point of prime order (5.1.1.1).
+# - n is the order of G (5.1.1.1).
+# Public-key validation (5.2.2):
+# - Verify that Q is not the point at infinity.
+# - Verify that X_Q and Y_Q are in [0,p-1].
+# - Verify that Q is on the curve.
+# - Verify that nQ is the point at infinity.
+# Signature generation (5.3):
+# - Pick random k from [1,n-1].
+# Signature checking (5.4.2):
+# - Verify that r and s are in [1,n-1].
+#
+# Version of 2008.11.25.
+#
+# Revision history:
+# 2005.12.31 - Initial version.
+# 2008.11.25 - Change CurveFp.is_on to contains_point.
+#
+# Written in 2005 by Peter Pearson and placed in the public domain.
+
+from __future__ import division
+
+try:
+ from gmpy2 import mpz
+
+ GMPY = True
+except ImportError:
+ try:
+ from gmpy import mpz
+
+ GMPY = True
+ except ImportError:
+ GMPY = False
+
+
+from six import python_2_unicode_compatible
+from . import numbertheory
+from ._rwlock import RWLock
+
+
+@python_2_unicode_compatible
+class CurveFp(object):
+ """Elliptic Curve over the field of integers modulo a prime."""
+
+ if GMPY:
+
+ def __init__(self, p, a, b, h=None):
+ """
+ The curve of points satisfying y^2 = x^3 + a*x + b (mod p).
+
+ h is an integer that is the cofactor of the elliptic curve domain
+ parameters; it is the number of points satisfying the elliptic
+ curve equation divided by the order of the base point. It is used
+ for selection of efficient algorithm for public point verification.
+ """
+ self.__p = mpz(p)
+ self.__a = mpz(a)
+ self.__b = mpz(b)
+ # h is not used in calculations and it can be None, so don't use
+ # gmpy with it
+ self.__h = h
+
+ else:
+
+ def __init__(self, p, a, b, h=None):
+ """
+ The curve of points satisfying y^2 = x^3 + a*x + b (mod p).
+
+ h is an integer that is the cofactor of the elliptic curve domain
+ parameters; it is the number of points satisfying the elliptic
+ curve equation divided by the order of the base point. It is used
+ for selecting an efficient algorithm for public point verification.
+ """
+ self.__p = p
+ self.__a = a
+ self.__b = b
+ self.__h = h
+
+ def __eq__(self, other):
+ """Return True if the curves are identical, False otherwise."""
+ if isinstance(other, CurveFp):
+ return (
+ self.__p == other.__p
+ and self.__a == other.__a
+ and self.__b == other.__b
+ )
+ return NotImplemented
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __hash__(self):
+ return hash((self.__p, self.__a, self.__b))
+
+ def p(self):
+ return self.__p
+
+ def a(self):
+ return self.__a
+
+ def b(self):
+ return self.__b
+
+ def cofactor(self):
+ return self.__h
+
+ def contains_point(self, x, y):
+ """Is the point (x,y) on this curve?"""
+ return (y * y - ((x * x + self.__a) * x + self.__b)) % self.__p == 0
+
+ def __str__(self):
+ if self.__h is not None:
+ return "CurveFp(p=%d, a=%d, b=%d, h=%d)" % (
+ self.__p,
+ self.__a,
+ self.__b,
+ self.__h,
+ )
+ return "CurveFp(p=%d, a=%d, b=%d)" % (self.__p, self.__a, self.__b)
+
+
+class PointJacobi(object):
+ """
+ Point on an elliptic curve. Uses Jacobian coordinates.
+
+ In Jacobian coordinates, there are three parameters, X, Y and Z.
+ They correspond to affine parameters 'x' and 'y' like so:
+
+ x = X / Z²
+ y = Y / Z³
+ """
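+
+ # A purely numeric illustration of the relation above (hypothetical
+ # values, ignoring the modular reduction): an affine point
+ # (x, y) = (4, 5) stored with z = 3 would carry X = x * z**2 = 36,
+ # Y = y * z**3 = 135, Z = 3; x() and y() below undo this by
+ # multiplying with the modular inverses of Z**2 and Z**3.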
+
+ def __init__(self, curve, x, y, z, order=None, generator=False):
+ """
+ Initialise a point that uses Jacobi representation internally.
+
+ :param CurveFp curve: curve on which the point resides
+ :param int x: the X parameter of Jacobi representation (equal to x when
+ converting from affine coordinates)
+ :param int y: the Y parameter of Jacobi representation (equal to y when
+ converting from affine coordinates)
+ :param int z: the Z parameter of Jacobi representation (equal to 1 when
+ converting from affine coordinates)
+ :param int order: the point order, must be non-zero when using
+ generator=True
+ :param bool generator: the point provided is a curve generator, so
+ it will commonly be used for scalar multiplication. This will cause
+ a multiplication table to be precomputed for it
+ """
+ self.__curve = curve
+ # since it's generally better (faster) to use scaled points vs unscaled
+ # ones, use writer-biased RWLock for locking:
+ self._update_lock = RWLock()
+ if GMPY:
+ self.__x = mpz(x)
+ self.__y = mpz(y)
+ self.__z = mpz(z)
+ self.__order = order and mpz(order)
+ else:
+ self.__x = x
+ self.__y = y
+ self.__z = z
+ self.__order = order
+ self.__generator = generator
+ self.__precompute = []
+
+ def _maybe_precompute(self):
+ if self.__generator:
+ # since we lack promotion of read-locks to write-locks, we do an
+ # "acquire-read-lock, check, acquire-write-lock plus recheck" cycle
+ try:
+ self._update_lock.reader_acquire()
+ if self.__precompute:
+ return
+ finally:
+ self._update_lock.reader_release()
+
+ try:
+ self._update_lock.writer_acquire()
+ if self.__precompute:
+ return
+ order = self.__order
+ assert order
+ i = 1
+ order *= 2
+ doubler = PointJacobi(
+ self.__curve, self.__x, self.__y, self.__z, order
+ )
+ order *= 2
+ self.__precompute.append((doubler.x(), doubler.y()))
+
+ while i < order:
+ i *= 2
+ doubler = doubler.double().scale()
+ self.__precompute.append((doubler.x(), doubler.y()))
+
+ finally:
+ self._update_lock.writer_release()
+
+ def __getstate__(self):
+ try:
+ self._update_lock.reader_acquire()
+ state = self.__dict__.copy()
+ finally:
+ self._update_lock.reader_release()
+ del state["_update_lock"]
+ return state
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ self._update_lock = RWLock()
+
+ def __eq__(self, other):
+ """Compare two points with each-other."""
+ try:
+ self._update_lock.reader_acquire()
+ if other is INFINITY:
+ return not self.__y or not self.__z
+ x1, y1, z1 = self.__x, self.__y, self.__z
+ finally:
+ self._update_lock.reader_release()
+ if isinstance(other, Point):
+ x2, y2, z2 = other.x(), other.y(), 1
+ elif isinstance(other, PointJacobi):
+ try:
+ other._update_lock.reader_acquire()
+ x2, y2, z2 = other.__x, other.__y, other.__z
+ finally:
+ other._update_lock.reader_release()
+ else:
+ return NotImplemented
+ if self.__curve != other.curve():
+ return False
+ p = self.__curve.p()
+
+ zz1 = z1 * z1 % p
+ zz2 = z2 * z2 % p
+
+ # compare the fractions by bringing them to the same denominator
+ # depend on short-circuit to save 4 multiplications in case of
+ # inequality
+ return (x1 * zz2 - x2 * zz1) % p == 0 and (
+ y1 * zz2 * z2 - y2 * zz1 * z1
+ ) % p == 0
+
+ def order(self):
+ """Return the order of the point.
+
+ None if it is undefined.
+ """
+ return self.__order
+
+ def curve(self):
+ """Return curve over which the point is defined."""
+ return self.__curve
+
+ def x(self):
+ """
+ Return affine x coordinate.
+
+ This method should be used only when the 'y' coordinate is not needed.
+ If both coordinates are needed, it's computationally more efficient
+ to call `to_affine()` or `scale()` first and then read x() and y()
+ from the returned instance.
+ """
+ try:
+ self._update_lock.reader_acquire()
+ if self.__z == 1:
+ return self.__x
+ x = self.__x
+ z = self.__z
+ finally:
+ self._update_lock.reader_release()
+ p = self.__curve.p()
+ z = numbertheory.inverse_mod(z, p)
+ return x * z ** 2 % p
+
+ def y(self):
+ """
+ Return affine y coordinate.
+
+ This method should be used only when the 'x' coordinate is not needed.
+ If both coordinates are needed, it's computationally more efficient
+ to call `to_affine()` or `scale()` first and then read x() and y()
+ from the returned instance.
+ """
+ try:
+ self._update_lock.reader_acquire()
+ if self.__z == 1:
+ return self.__y
+ y = self.__y
+ z = self.__z
+ finally:
+ self._update_lock.reader_release()
+ p = self.__curve.p()
+ z = numbertheory.inverse_mod(z, p)
+ return y * z ** 3 % p
+
+ def scale(self):
+ """
+ Return point scaled so that z == 1.
+
+ Modifies point in place, returns self.
+ """
+ try:
+ self._update_lock.reader_acquire()
+ if self.__z == 1:
+ return self
+ finally:
+ self._update_lock.reader_release()
+
+ try:
+ self._update_lock.writer_acquire()
+ # scaling an already scaled point is safe (as the inverse of 1 is 1)
+ # and quick, so we don't need to optimise for the unlikely event of
+ # two threads hitting the lock at the same time
+ p = self.__curve.p()
+ z_inv = numbertheory.inverse_mod(self.__z, p)
+ zz_inv = z_inv * z_inv % p
+ self.__x = self.__x * zz_inv % p
+ self.__y = self.__y * zz_inv * z_inv % p
+ # we are setting the z last so that the check above will return
+ # true only after all values were already updated
+ self.__z = 1
+ finally:
+ self._update_lock.writer_release()
+ return self
+
+ def to_affine(self):
+ """Return point in affine form."""
+ if not self.__y or not self.__z:
+ return INFINITY
+ self.scale()
+ # after point is scaled, it's immutable, so no need to perform locking
+ return Point(self.__curve, self.__x, self.__y, self.__order)
+
+ @staticmethod
+ def from_affine(point, generator=False):
+ """Create from an affine point.
+
+ :param bool generator: set to True to make the point precalculate a
+ multiplication table - useful for a public point when verifying many
+ signatures (around 100 or so) or for generator points of a curve.
+ """
+ return PointJacobi(
+ point.curve(), point.x(), point.y(), 1, point.order(), generator
+ )
+
+ # please note that all the methods that use the equations from
+ # hyperelliptic.org are formatted in a way that maximises performance.
+ # Things that make code faster: multiplying instead of taking to the power
+ # (`xx = x * x; xxxx = xx * xx % p` is faster than `xxxx = x**4 % p` and
+ # `pow(x, 4, p)`),
+ # multiple assignments at the same time (`x1, x2 = self.x1, self.x2` is
+ # faster than `x1 = self.x1; x2 = self.x2`),
+ # similarly, sometimes the `% p` is skipped if it makes the calculation
+ # faster and the result of calculation is later reduced modulo `p`
+
+ def _double_with_z_1(self, X1, Y1, p, a):
+ """Add a point to itself with z == 1."""
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-mdbl-2007-bl
+ XX, YY = X1 * X1 % p, Y1 * Y1 % p
+ if not YY:
+ return 0, 0, 1
+ YYYY = YY * YY % p
+ S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p
+ M = 3 * XX + a
+ T = (M * M - 2 * S) % p
+ # X3 = T
+ Y3 = (M * (S - T) - 8 * YYYY) % p
+ Z3 = 2 * Y1 % p
+ return T, Y3, Z3
+
+ def _double(self, X1, Y1, Z1, p, a):
+ """Add a point to itself, arbitrary z."""
+ if Z1 == 1:
+ return self._double_with_z_1(X1, Y1, p, a)
+ if not Z1:
+ return 0, 0, 1
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-dbl-2007-bl
+ XX, YY = X1 * X1 % p, Y1 * Y1 % p
+ if not YY:
+ return 0, 0, 1
+ YYYY = YY * YY % p
+ ZZ = Z1 * Z1 % p
+ S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p
+ M = (3 * XX + a * ZZ * ZZ) % p
+ T = (M * M - 2 * S) % p
+ # X3 = T
+ Y3 = (M * (S - T) - 8 * YYYY) % p
+ Z3 = ((Y1 + Z1) ** 2 - YY - ZZ) % p
+
+ return T, Y3, Z3
+
+ def double(self):
+ """Add a point to itself."""
+ if not self.__y:
+ return INFINITY
+
+ p, a = self.__curve.p(), self.__curve.a()
+
+ try:
+ self._update_lock.reader_acquire()
+ X1, Y1, Z1 = self.__x, self.__y, self.__z
+ finally:
+ self._update_lock.reader_release()
+
+ X3, Y3, Z3 = self._double(X1, Y1, Z1, p, a)
+
+ if not Y3 or not Z3:
+ return INFINITY
+ return PointJacobi(self.__curve, X3, Y3, Z3, self.__order)
+
+ def _add_with_z_1(self, X1, Y1, X2, Y2, p):
+ """add points when both Z1 and Z2 equal 1"""
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-mmadd-2007-bl
+ H = X2 - X1
+ HH = H * H
+ I = 4 * HH % p
+ J = H * I
+ r = 2 * (Y2 - Y1)
+ if not H and not r:
+ return self._double_with_z_1(X1, Y1, p, self.__curve.a())
+ V = X1 * I
+ X3 = (r ** 2 - J - 2 * V) % p
+ Y3 = (r * (V - X3) - 2 * Y1 * J) % p
+ Z3 = 2 * H % p
+ return X3, Y3, Z3
+
+ def _add_with_z_eq(self, X1, Y1, Z1, X2, Y2, p):
+ """add points when Z1 == Z2"""
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-zadd-2007-m
+ A = (X2 - X1) ** 2 % p
+ B = X1 * A % p
+ C = X2 * A
+ D = (Y2 - Y1) ** 2 % p
+ if not A and not D:
+ return self._double(X1, Y1, Z1, p, self.__curve.a())
+ X3 = (D - B - C) % p
+ Y3 = ((Y2 - Y1) * (B - X3) - Y1 * (C - B)) % p
+ Z3 = Z1 * (X2 - X1) % p
+ return X3, Y3, Z3
+
+ def _add_with_z2_1(self, X1, Y1, Z1, X2, Y2, p):
+ """add points when Z2 == 1"""
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-madd-2007-bl
+ Z1Z1 = Z1 * Z1 % p
+ U2, S2 = X2 * Z1Z1 % p, Y2 * Z1 * Z1Z1 % p
+ H = (U2 - X1) % p
+ HH = H * H % p
+ I = 4 * HH % p
+ J = H * I
+ r = 2 * (S2 - Y1) % p
+ if not r and not H:
+ return self._double_with_z_1(X2, Y2, p, self.__curve.a())
+ V = X1 * I
+ X3 = (r * r - J - 2 * V) % p
+ Y3 = (r * (V - X3) - 2 * Y1 * J) % p
+ Z3 = ((Z1 + H) ** 2 - Z1Z1 - HH) % p
+ return X3, Y3, Z3
+
+ def _add_with_z_ne(self, X1, Y1, Z1, X2, Y2, Z2, p):
+ """add points with arbitrary z"""
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-add-2007-bl
+ Z1Z1 = Z1 * Z1 % p
+ Z2Z2 = Z2 * Z2 % p
+ U1 = X1 * Z2Z2 % p
+ U2 = X2 * Z1Z1 % p
+ S1 = Y1 * Z2 * Z2Z2 % p
+ S2 = Y2 * Z1 * Z1Z1 % p
+ H = U2 - U1
+ I = 4 * H * H % p
+ J = H * I % p
+ r = 2 * (S2 - S1) % p
+ if not H and not r:
+ return self._double(X1, Y1, Z1, p, self.__curve.a())
+ V = U1 * I
+ X3 = (r * r - J - 2 * V) % p
+ Y3 = (r * (V - X3) - 2 * S1 * J) % p
+ Z3 = ((Z1 + Z2) ** 2 - Z1Z1 - Z2Z2) * H % p
+
+ return X3, Y3, Z3
+
+ def __radd__(self, other):
+ """Add other to self."""
+ return self + other
+
+ def _add(self, X1, Y1, Z1, X2, Y2, Z2, p):
+ """add two points, select fastest method."""
+ if not Y1 or not Z1:
+ return X2, Y2, Z2
+ if not Y2 or not Z2:
+ return X1, Y1, Z1
+ if Z1 == Z2:
+ if Z1 == 1:
+ return self._add_with_z_1(X1, Y1, X2, Y2, p)
+ return self._add_with_z_eq(X1, Y1, Z1, X2, Y2, p)
+ if Z1 == 1:
+ return self._add_with_z2_1(X2, Y2, Z2, X1, Y1, p)
+ if Z2 == 1:
+ return self._add_with_z2_1(X1, Y1, Z1, X2, Y2, p)
+ return self._add_with_z_ne(X1, Y1, Z1, X2, Y2, Z2, p)
+
+ def __add__(self, other):
+ """Add two points on elliptic curve."""
+ if self == INFINITY:
+ return other
+ if other == INFINITY:
+ return self
+ if isinstance(other, Point):
+ other = PointJacobi.from_affine(other)
+ if self.__curve != other.__curve:
+ raise ValueError("The other point is on a different curve")
+
+ p = self.__curve.p()
+ try:
+ self._update_lock.reader_acquire()
+ X1, Y1, Z1 = self.__x, self.__y, self.__z
+ finally:
+ self._update_lock.reader_release()
+ try:
+ other._update_lock.reader_acquire()
+ X2, Y2, Z2 = other.__x, other.__y, other.__z
+ finally:
+ other._update_lock.reader_release()
+ X3, Y3, Z3 = self._add(X1, Y1, Z1, X2, Y2, Z2, p)
+
+ if not Y3 or not Z3:
+ return INFINITY
+ return PointJacobi(self.__curve, X3, Y3, Z3, self.__order)
+
+ def __rmul__(self, other):
+ """Multiply point by an integer."""
+ return self * other
+
+ def _mul_precompute(self, other):
+ """Multiply point by integer with precomputation table."""
+ X3, Y3, Z3, p = 0, 0, 1, self.__curve.p()
+ _add = self._add
+ for X2, Y2 in self.__precompute:
+ if other % 2:
+ if other % 4 >= 2:
+ other = (other + 1) // 2
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p)
+ else:
+ other = (other - 1) // 2
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p)
+ else:
+ other //= 2
+
+ if not Y3 or not Z3:
+ return INFINITY
+ return PointJacobi(self.__curve, X3, Y3, Z3, self.__order)
+
+ @staticmethod
+ def _naf(mult):
+ """Calculate non-adjacent form of number."""
+ ret = []
+ while mult:
+ if mult % 2:
+ nd = mult % 4
+ if nd >= 2:
+ nd = nd - 4
+ ret += [nd]
+ mult -= nd
+ else:
+ ret += [0]
+ mult //= 2
+ return ret
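+
+ # Note: the digits returned by _naf() are least-significant first,
+ # e.g. _naf(7) == [-1, 0, 0, 1], i.e. 7 == -1 * 1 + 1 * 8, so the
+ # multiplier can be processed with fewer point additions than a plain
+ # binary expansion would need.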
+
+ def __mul__(self, other):
+ """Multiply point by an integer."""
+ if not self.__y or not other:
+ return INFINITY
+ if other == 1:
+ return self
+ if self.__order:
+ # order*2 as a protection for Minerva
+ other = other % (self.__order * 2)
+ self._maybe_precompute()
+ if self.__precompute:
+ return self._mul_precompute(other)
+
+ self = self.scale()
+ # once scaled, the point is immutable, no need to lock
+ X2, Y2 = self.__x, self.__y
+ X3, Y3, Z3 = 0, 0, 1
+ p, a = self.__curve.p(), self.__curve.a()
+ _double = self._double
+ _add = self._add
+ # since adding points when at least one of them is scaled
+ # is quicker, reverse the NAF order
+ for i in reversed(self._naf(other)):
+ X3, Y3, Z3 = _double(X3, Y3, Z3, p, a)
+ if i < 0:
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p)
+ elif i > 0:
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p)
+
+ if not Y3 or not Z3:
+ return INFINITY
+
+ return PointJacobi(self.__curve, X3, Y3, Z3, self.__order)
+
+ @staticmethod
+ def _leftmost_bit(x):
+ """Return the value of the leftmost set bit of x (largest power of 2 <= x)"""
+ assert x > 0
+ result = 1
+ while result <= x:
+ result = 2 * result
+ return result // 2
+
+ def mul_add(self, self_mul, other, other_mul):
+ """
+ Do two multiplications at the same time, add results.
+
+ calculates self*self_mul + other*other_mul
+ """
+ if other is INFINITY or other_mul == 0:
+ return self * self_mul
+ if self_mul == 0:
+ return other * other_mul
+ if not isinstance(other, PointJacobi):
+ other = PointJacobi.from_affine(other)
+ # when both points have precomputed tables, multiplying each of them
+ # separately and adding the results is faster (as it uses NAF)
+ self._maybe_precompute()
+ other._maybe_precompute()
+ if self.__precompute and other.__precompute:
+ return self * self_mul + other * other_mul
+
+ if self.__order:
+ self_mul = self_mul % self.__order
+ other_mul = other_mul % self.__order
+
+ i = self._leftmost_bit(max(self_mul, other_mul)) * 2
+ X3, Y3, Z3 = 0, 0, 1
+ p, a = self.__curve.p(), self.__curve.a()
+ self = self.scale()
+ # after scaling, point is immutable, no need for locking
+ X1, Y1 = self.__x, self.__y
+ other = other.scale()
+ X2, Y2 = other.__x, other.__y
+ both = self + other
+ if both is INFINITY:
+ X4, Y4 = 0, 0
+ else:
+ both.scale()
+ X4, Y4 = both.__x, both.__y
+ _double = self._double
+ _add = self._add
+ while i > 1:
+ X3, Y3, Z3 = _double(X3, Y3, Z3, p, a)
+ i = i // 2
+
+ if self_mul & i and other_mul & i:
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X4, Y4, 1, p)
+ elif self_mul & i:
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X1, Y1, 1, p)
+ elif other_mul & i:
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p)
+
+ if not Y3 or not Z3:
+ return INFINITY
+
+ return PointJacobi(self.__curve, X3, Y3, Z3, self.__order)
+
+ def __neg__(self):
+ """Return negated point."""
+ try:
+ self._update_lock.reader_acquire()
+ return PointJacobi(
+ self.__curve, self.__x, -self.__y, self.__z, self.__order
+ )
+ finally:
+ self._update_lock.reader_release()
+
+
+class Point(object):
+ """A point on an elliptic curve. Altering x and y is forbidden,
+ but they can be read by the x() and y() methods."""
+
+ def __init__(self, curve, x, y, order=None):
+ """curve, x, y, order; order (optional) is the order of this point."""
+ self.__curve = curve
+ if GMPY:
+ self.__x = x and mpz(x)
+ self.__y = y and mpz(y)
+ self.__order = order and mpz(order)
+ else:
+ self.__x = x
+ self.__y = y
+ self.__order = order
+ # self.curve is allowed to be None only for INFINITY:
+ if self.__curve:
+ assert self.__curve.contains_point(x, y)
+ # for curves with cofactor 1, all points that are on the curve are
+ # scalar multiples of the base point, so performing multiplication is
+ # not necessary to verify that. See Section 3.2.2.1 of SEC 1 v2
+ if curve and curve.cofactor() != 1 and order:
+ assert self * order == INFINITY
+
+ def __eq__(self, other):
+ """Return True if the points are identical, False otherwise."""
+ if isinstance(other, Point):
+ return (
+ self.__curve == other.__curve
+ and self.__x == other.__x
+ and self.__y == other.__y
+ )
+ return NotImplemented
+
+ def __neg__(self):
+ return Point(self.__curve, self.__x, self.__curve.p() - self.__y)
+
+ def __add__(self, other):
+ """Add one point to another point."""
+
+ # X9.62 B.3:
+
+ if not isinstance(other, Point):
+ return NotImplemented
+ if other == INFINITY:
+ return self
+ if self == INFINITY:
+ return other
+ assert self.__curve == other.__curve
+ if self.__x == other.__x:
+ if (self.__y + other.__y) % self.__curve.p() == 0:
+ return INFINITY
+ else:
+ return self.double()
+
+ p = self.__curve.p()
+
+ l = (
+ (other.__y - self.__y)
+ * numbertheory.inverse_mod(other.__x - self.__x, p)
+ ) % p
+
+ x3 = (l * l - self.__x - other.__x) % p
+ y3 = (l * (self.__x - x3) - self.__y) % p
+
+ return Point(self.__curve, x3, y3)
+
+ def __mul__(self, other):
+ """Multiply a point by an integer."""
+
+ def leftmost_bit(x):
+ assert x > 0
+ result = 1
+ while result <= x:
+ result = 2 * result
+ return result // 2
+
+ e = other
+ if e == 0 or (self.__order and e % self.__order == 0):
+ return INFINITY
+ if self == INFINITY:
+ return INFINITY
+ if e < 0:
+ return (-self) * (-e)
+
+ # From X9.62 D.3.2:
+
+ e3 = 3 * e
+ negative_self = Point(self.__curve, self.__x, -self.__y, self.__order)
+ i = leftmost_bit(e3) // 2
+ result = self
+ # print_("Multiplying %s by %d (e3 = %d):" % (self, other, e3))
+ while i > 1:
+ result = result.double()
+ if (e3 & i) != 0 and (e & i) == 0:
+ result = result + self
+ if (e3 & i) == 0 and (e & i) != 0:
+ result = result + negative_self
+ # print_(". . . i = %d, result = %s" % ( i, result ))
+ i = i // 2
+
+ return result
+
+ def __rmul__(self, other):
+ """Multiply a point by an integer."""
+
+ return self * other
+
+ def __str__(self):
+ if self == INFINITY:
+ return "infinity"
+ return "(%d,%d)" % (self.__x, self.__y)
+
+ def double(self):
+ """Return a new point that is twice the old."""
+
+ if self == INFINITY:
+ return INFINITY
+
+ # X9.62 B.3:
+
+ p = self.__curve.p()
+ a = self.__curve.a()
+
+ l = (
+ (3 * self.__x * self.__x + a)
+ * numbertheory.inverse_mod(2 * self.__y, p)
+ ) % p
+
+ x3 = (l * l - 2 * self.__x) % p
+ y3 = (l * (self.__x - x3) - self.__y) % p
+
+ return Point(self.__curve, x3, y3)
+
+ def x(self):
+ return self.__x
+
+ def y(self):
+ return self.__y
+
+ def curve(self):
+ return self.__curve
+
+ def order(self):
+ return self.__order
+
+
+# This one point is the Point At Infinity for all purposes:
+INFINITY = Point(None, None, None)
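+
+
+# A minimal usage sketch of the classes above, using the small textbook
+# curve y^2 = x^3 + 2*x + 2 over F_17, whose point (5, 1) has order 19
+# (the names `toy_curve` and `toy_gen` are hypothetical, used only for
+# this illustration):
+#
+#     toy_curve = CurveFp(17, 2, 2, 1)
+#     toy_gen = PointJacobi(toy_curve, 5, 1, 1, 19, generator=True)
+#     assert toy_curve.contains_point(5, 1)
+#     assert toy_gen * 2 == toy_gen + toy_gen  # doubling == self-addition
+#     assert toy_gen * 19 == INFINITY          # 19 is the point's order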
diff --git a/frozen_deps/ecdsa/keys.py b/frozen_deps/ecdsa/keys.py
new file mode 100644
index 0000000..a6fc13f
--- /dev/null
+++ b/frozen_deps/ecdsa/keys.py
@@ -0,0 +1,1453 @@
+"""
+Primary classes for performing signing and verification operations.
+
+.. glossary::
+
+ raw encoding
+ Conversion of public, private keys and signatures (which in
+ mathematical sense are integers or pairs of integers) to strings of
+ bytes that does not use any special tags or encoding rules.
+ For any given curve, all keys of the same type or signatures will be
+ encoded to byte strings of the same length. In a more formal sense,
+ the integers are encoded as big-endian, constant length byte strings,
+ where the string length is determined by the curve order (e.g.
+ for NIST256p the order is 256 bits long, so the private key will be 32
+ bytes long while the public key will be 64 bytes long). The encoding of a
+ single integer is zero-padded on the left if the numerical value is
+ low. In case of public keys and signatures, which are comprised of two
+ integers, the integers are simply concatenated.
+
+ uncompressed
+ The most common formatting specified in PKIX standards. Specified in
+ X9.62 and SEC1 standards. The only difference between it and
+ :term:`raw encoding` is the prepending of a 0x04 byte. Thus an
+ uncompressed NIST256p public key encoding will be 65 bytes long.
+
+ compressed
+ The public point representation that uses half the bytes of the
+ :term:`uncompressed` encoding (rounded up). It uses the first byte of
+ the encoding to specify the sign of the y coordinate and encodes the
+ x coordinate as-is. The first byte of the encoding is equal to
+ 0x02 or 0x03. Compressed encoding of NIST256p public key will be 33
+ bytes long.
+
+ hybrid
+ A combination of :term:`uncompressed` and :term:`compressed` encodings.
+ Both x and y coordinates are stored just as in the :term:`uncompressed`
+ encoding, but the first byte reflects the parity of the y coordinate,
+ as in the :term:`compressed` encoding. The first byte of the encoding
+ will be equal to 0x06 or 0x07. Hybrid encoding of a NIST256p public
+ key will be 65 bytes long.
+
+ PEM
+ The acronym stands for Privacy-Enhanced Mail, but currently it is used
+ primarily as the way to encode :term:`DER` objects into text that can
+ be either easily copy-pasted or transferred over email.
+ It uses headers like ``-----BEGIN <type of contents>-----`` and footers
+ like ``-----END <type of contents>-----`` to separate multiple
+ types of objects in the same file or the object from the surrounding
+ comments. The actual object stored is base64 encoded.
+
+ DER
+ Distinguished Encoding Rules, the way to encode :term:`ASN.1` objects
+ deterministically and uniquely into byte strings.
+
+ ASN.1
+ Abstract Syntax Notation 1 is a standard description language for
+ specifying serialisation and deserialisation of data structures in a
+ portable and cross-platform way.
+
+ bytes-like object
+ All the types that implement the buffer protocol. That includes
+ ``str`` (only on Python 2), ``bytes``, ``bytearray``, ``array.array``
+ and ``memoryview`` of those objects.
+ Please note that ``array.array`` serialisation (converting it to a byte
+ string) is endianness dependent! A signature computed over an
+ ``array.array`` of integers on a big-endian system will not verify on
+ a little-endian system and vice-versa.
+"""
+
+import binascii
+from hashlib import sha1
+from six import PY2, b
+from . import ecdsa
+from . import der
+from . import rfc6979
+from . import ellipticcurve
+from .curves import NIST192p, find_curve
+from .numbertheory import square_root_mod_prime, SquareRootError
+from .ecdsa import RSZeroError
+from .util import string_to_number, number_to_string, randrange
+from .util import sigencode_string, sigdecode_string, bit_length
+from .util import (
+ oid_ecPublicKey,
+ encoded_oid_ecPublicKey,
+ oid_ecDH,
+ oid_ecMQV,
+ MalformedSignature,
+)
+from ._compat import normalise_bytes
+
+
+__all__ = [
+ "BadSignatureError",
+ "BadDigestError",
+ "VerifyingKey",
+ "SigningKey",
+ "MalformedPointError",
+]
+
+
+class BadSignatureError(Exception):
+ """
+ Raised when verification of a signature fails.
+
+ Will be raised irrespective of the reason for the failure:
+
+ * the calculated or provided hash does not match the signature
+ * the signature does not match the curve/public key
+ * the encoding of the signature is malformed
+ * the size of the signature does not match the curve of the VerifyingKey
+ """
+
+ pass
+
+
+class BadDigestError(Exception):
+ """Raised in case the selected hash is too large for the curve."""
+
+ pass
+
+
+class MalformedPointError(AssertionError):
+ """Raised in case the encoding of private or public key is malformed."""
+
+ pass
+
+
+class VerifyingKey(object):
+ """
+ Class for handling keys that can verify signatures (public keys).
+
+ :ivar ecdsa.curves.Curve curve: The Curve over which all the cryptographic
+ operations will take place
+ :ivar default_hashfunc: the function that will be used for hashing the
+ data. Should implement the same API as hashlib.sha1
+ :vartype default_hashfunc: callable
+ :ivar pubkey: the actual public key
+ :vartype pubkey: ecdsa.ecdsa.Public_key
+ """
+
+ def __init__(self, _error__please_use_generate=None):
+ """Unsupported, please use one of the classmethods to initialise."""
+ if not _error__please_use_generate:
+ raise TypeError(
+ "Please use VerifyingKey.generate() to construct me"
+ )
+ self.curve = None
+ self.default_hashfunc = None
+ self.pubkey = None
+
+ def __repr__(self):
+ pub_key = self.to_string("compressed")
+ return "VerifyingKey.from_string({0!r}, {1!r}, {2})".format(
+ pub_key, self.curve, self.default_hashfunc().name
+ )
+
+ def __eq__(self, other):
+ """Return True if the keys are identical, False otherwise."""
+ if isinstance(other, VerifyingKey):
+ return self.curve == other.curve and self.pubkey == other.pubkey
+ return NotImplemented
+
+ @classmethod
+ def from_public_point(
+ cls, point, curve=NIST192p, hashfunc=sha1, validate_point=True
+ ):
+ """
+ Initialise the object from a Point object.
+
+ This is a low-level method, generally you will not want to use it.
+
+ :param point: The point to wrap around, the actual public key
+ :type point: ecdsa.ellipticcurve.Point
+ :param curve: The curve on which the point needs to reside, defaults
+ to NIST192p
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ verification, needs to implement the same interface
+ as hashlib.sha1
+ :type hashfunc: callable
+ :param bool validate_point: whether to check if the point lies on the
+ curve; validation should always be performed if the public point is
+ not a result of our own calculation
+
+ :raises MalformedPointError: if the public point does not lie on the
+ curve
+
+ :return: Initialised VerifyingKey object
+ :rtype: VerifyingKey
+ """
+ self = cls(_error__please_use_generate=True)
+ if not isinstance(point, ellipticcurve.PointJacobi):
+ point = ellipticcurve.PointJacobi.from_affine(point)
+ self.curve = curve
+ self.default_hashfunc = hashfunc
+ try:
+ self.pubkey = ecdsa.Public_key(
+ curve.generator, point, validate_point
+ )
+ except ecdsa.InvalidPointError:
+ raise MalformedPointError("Point does not lie on the curve")
+ self.pubkey.order = curve.order
+ return self
+
+ def precompute(self, lazy=False):
+ """
+ Precompute multiplication tables for faster signature verification.
+
+ Calling this method will cause the library to precompute the
+ scalar multiplication tables, used in signature verification.
+ While it's an expensive operation (comparable to performing
+ as many signatures as the bit size of the curve, i.e. 256 for NIST256p),
+ it speeds up verification about 2 times. You should call this method
+ if you expect to verify hundreds of signatures (or more) using the same
+ VerifyingKey object.
+
+ Note: you should call this method only once; it generates a new
+ precomputation table every time it's called.
+
+ :param bool lazy: whether to calculate the precomputation table now
+ (if set to False) or if it should be delayed to the time of first
+ use (when set to True)
+ """
+ self.pubkey.point = ellipticcurve.PointJacobi.from_affine(
+ self.pubkey.point, True
+ )
+ # as precomputation is now delayed to the time of first use of the
+ # point and we were asked specifically to precompute now, make
+ # sure the precomputation is performed now to preserve the behaviour
+ if not lazy:
+ self.pubkey.point * 2
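+
+ # A sketch of the intended usage (`pem_text` and `batch` are
+ # hypothetical names, not part of this module):
+ #
+ #     vk = VerifyingKey.from_pem(pem_text)
+ #     vk.precompute()
+ #     for sig, msg in batch:
+ #         vk.verify(sig, msg)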
+
+ @staticmethod
+ def _from_raw_encoding(string, curve):
+ """
+ Decode public point from :term:`raw encoding`.
+
+ :term:`raw encoding` is the same as the :term:`uncompressed` encoding,
+ but without the 0x04 byte at the beginning.
+ """
+ order = curve.order
+ # real assert, from_string() should not call us with different length
+ assert len(string) == curve.verifying_key_length
+ xs = string[: curve.baselen]
+ ys = string[curve.baselen :]
+ if len(xs) != curve.baselen:
+ raise MalformedPointError("Unexpected length of encoded x")
+ if len(ys) != curve.baselen:
+ raise MalformedPointError("Unexpected length of encoded y")
+ x = string_to_number(xs)
+ y = string_to_number(ys)
+
+ return ellipticcurve.PointJacobi(curve.curve, x, y, 1, order)
+
+ @staticmethod
+ def _from_compressed(string, curve):
+ """Decode public point from compressed encoding."""
+ if string[:1] not in (b("\x02"), b("\x03")):
+ raise MalformedPointError("Malformed compressed point encoding")
+
+ is_even = string[:1] == b("\x02")
+ x = string_to_number(string[1:])
+ order = curve.order
+ p = curve.curve.p()
+ alpha = (pow(x, 3, p) + (curve.curve.a() * x) + curve.curve.b()) % p
+ try:
+ beta = square_root_mod_prime(alpha, p)
+ except SquareRootError as e:
+ raise MalformedPointError(
+ "Encoding does not correspond to a point on curve", e
+ )
+ if is_even == bool(beta & 1):
+ y = p - beta
+ else:
+ y = beta
+ return ellipticcurve.PointJacobi(curve.curve, x, y, 1, order)
+
+ @classmethod
+ def _from_hybrid(cls, string, curve, validate_point):
+ """Decode public point from hybrid encoding."""
+ # real assert, from_string() should not call us with different types
+ assert string[:1] in (b("\x06"), b("\x07"))
+
+ # primarily use the uncompressed as it's easiest to handle
+ point = cls._from_raw_encoding(string[1:], curve)
+
+ # but validate if it's self-consistent if we're asked to do that
+ if validate_point and (
+ point.y() & 1
+ and string[:1] != b("\x07")
+ or (not point.y() & 1)
+ and string[:1] != b("\x06")
+ ):
+ raise MalformedPointError("Inconsistent hybrid point encoding")
+
+ return point
+
+ @classmethod
+ def from_string(
+ cls, string, curve=NIST192p, hashfunc=sha1, validate_point=True
+ ):
+ """
+ Initialise the object from byte encoding of public key.
+
+ The method does accept and automatically detect the type of point
+ encoding used. It supports the :term:`raw encoding`,
+ :term:`uncompressed`, :term:`compressed` and :term:`hybrid` encodings.
+
+ Note: while the method is named "from_string", the name is a misnomer
+ from Python 2 days, when character strings and byte strings shared a
+ type. In Python 3 the input needs to be a bytes-like object.
+
+ :param string: single point encoding of the public key
+ :type string: :term:`bytes-like object`
+ :param curve: the curve on which the public key is expected to lie
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ verification, needs to implement the same interface as hashlib.sha1
+ :type hashfunc: callable
+ :param validate_point: whether to verify that the point lies on the
+ provided curve or not, defaults to True
+ :type validate_point: bool
+
+ :raises MalformedPointError: if the public point does not lie on the
+ curve or the encoding is invalid
+
+ :return: Initialised VerifyingKey object
+ :rtype: VerifyingKey
+ """
+ string = normalise_bytes(string)
+ sig_len = len(string)
+ if sig_len == curve.verifying_key_length:
+ point = cls._from_raw_encoding(string, curve)
+ elif sig_len == curve.verifying_key_length + 1:
+ if string[:1] in (b("\x06"), b("\x07")):
+ point = cls._from_hybrid(string, curve, validate_point)
+ elif string[:1] == b("\x04"):
+ point = cls._from_raw_encoding(string[1:], curve)
+ else:
+ raise MalformedPointError(
+ "Invalid X9.62 encoding of the public point"
+ )
+ elif sig_len == curve.baselen + 1:
+ point = cls._from_compressed(string, curve)
+ else:
+ raise MalformedPointError(
+ "Length of string does not match lengths of "
+ "any of the supported encodings of {0} "
+ "curve.".format(curve.name)
+ )
+ return cls.from_public_point(point, curve, hashfunc, validate_point)
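+
+ # A sketch of decoding a hypothetical `raw_bytes` buffer holding a
+ # 33-byte compressed NIST256p point (NIST256p comes from ecdsa.curves):
+ #
+ #     vk = VerifyingKey.from_string(raw_bytes, curve=NIST256p)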
+
+ @classmethod
+ def from_pem(cls, string, hashfunc=sha1):
+ """
+ Initialise from public key stored in :term:`PEM` format.
+
+ The PEM header of the key should be ``BEGIN PUBLIC KEY``.
+
+ See the :func:`~VerifyingKey.from_der()` method for details of the
+ format supported.
+
+ Note: only a single PEM object encoding is supported in the provided
+ string.
+
+ :param string: text with PEM-encoded public ECDSA key
+ :type string: str
+
+ :return: Initialised VerifyingKey object
+ :rtype: VerifyingKey
+ """
+ return cls.from_der(der.unpem(string), hashfunc=hashfunc)
+
+ @classmethod
+ def from_der(cls, string, hashfunc=sha1):
+ """
+ Initialise the key stored in :term:`DER` format.
+
+ The expected format of the key is the SubjectPublicKeyInfo structure
+ from RFC5912::
+
+ SubjectPublicKeyInfo {PUBLIC-KEY: IOSet} ::= SEQUENCE {
+ algorithm AlgorithmIdentifier {PUBLIC-KEY, {IOSet}},
+ subjectPublicKey BIT STRING
+ }
+
+ Note: only public EC keys are supported by this method. The
+ SubjectPublicKeyInfo.algorithm.algorithm field must specify
+ id-ecPublicKey (see RFC3279).
+
+ Only the named curve encoding is supported, thus the
+ SubjectPublicKeyInfo.algorithm.parameters field needs to be an
+ object identifier. A sequence in that field indicates an explicit
+ parameter curve encoding, this format is not supported. A NULL object
+ in that field indicates an "implicitlyCA" encoding, where the curve
+ parameters come from the CA certificate; those, again, are not supported.
+
+ :param string: binary string with the DER encoding of public ECDSA key
+ :type string: bytes-like object
+
+ :return: Initialised VerifyingKey object
+ :rtype: VerifyingKey
+ """
+ string = normalise_bytes(string)
+ # [[oid_ecPublicKey,oid_curve], point_str_bitstring]
+ s1, empty = der.remove_sequence(string)
+ if empty != b"":
+ raise der.UnexpectedDER(
+ "trailing junk after DER pubkey: %s" % binascii.hexlify(empty)
+ )
+ s2, point_str_bitstring = der.remove_sequence(s1)
+ # s2 = oid_ecPublicKey,oid_curve
+ oid_pk, rest = der.remove_object(s2)
+ oid_curve, empty = der.remove_object(rest)
+ if empty != b"":
+ raise der.UnexpectedDER(
+ "trailing junk after DER pubkey objects: %s"
+ % binascii.hexlify(empty)
+ )
+ if not oid_pk == oid_ecPublicKey:
+ raise der.UnexpectedDER(
+ "Unexpected object identifier in DER "
+ "encoding: {0!r}".format(oid_pk)
+ )
+ curve = find_curve(oid_curve)
+ point_str, empty = der.remove_bitstring(point_str_bitstring, 0)
+ if empty != b"":
+ raise der.UnexpectedDER(
+ "trailing junk after pubkey pointstring: %s"
+ % binascii.hexlify(empty)
+ )
+ # raw encoding of point is invalid in DER files
+ if len(point_str) == curve.verifying_key_length:
+ raise der.UnexpectedDER("Malformed encoding of public point")
+ return cls.from_string(point_str, curve, hashfunc=hashfunc)
+
+ @classmethod
+ def from_public_key_recovery(
+ cls, signature, data, curve, hashfunc=sha1, sigdecode=sigdecode_string
+ ):
+ """
+ Return keys that can be used as verifiers of the provided signature.
+
+ Tries to recover the public key that can be used to verify the
+ signature; usually two such keys are returned.
+
+ :param signature: the byte string with the encoded signature
+ :type signature: bytes-like object
+ :param data: the data to be hashed for signature verification
+ :type data: bytes-like object
+ :param curve: the curve over which the signature was performed
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ verification, needs to implement the same interface as hashlib.sha1
+ :type hashfunc: callable
+ :param sigdecode: Callable to define the way the signature needs to
+ be decoded to an object, needs to handle `signature` as the
+ first parameter, the curve order (an int) as the second and return
+ a tuple with two integers, "r" as the first one and "s" as the
+ second one. See :func:`ecdsa.util.sigdecode_string` and
+ :func:`ecdsa.util.sigdecode_der` for examples.
+ :type sigdecode: callable
+
+ :return: Initialised VerifyingKey objects
+ :rtype: list of VerifyingKey
+ """
+ data = normalise_bytes(data)
+ digest = hashfunc(data).digest()
+ return cls.from_public_key_recovery_with_digest(
+ signature, digest, curve, hashfunc=hashfunc, sigdecode=sigdecode
+ )
+
+ @classmethod
+ def from_public_key_recovery_with_digest(
+ cls,
+ signature,
+ digest,
+ curve,
+ hashfunc=sha1,
+ sigdecode=sigdecode_string,
+ ):
+ """
+ Return keys that can be used as verifiers of the provided signature.
+
+ Tries to recover the public key that can be used to verify the
+ signature; usually two such keys are returned.
+
+ :param signature: the byte string with the encoded signature
+ :type signature: bytes-like object
+ :param digest: the hash value of the message signed by the signature
+ :type digest: bytes-like object
+ :param curve: the curve over which the signature was performed
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ verification, needs to implement the same interface as hashlib.sha1
+ :type hashfunc: callable
+ :param sigdecode: Callable to define the way the signature needs to
+ be decoded to an object, needs to handle `signature` as the
+ first parameter, the curve order (an int) as the second and return
+ a tuple with two integers, "r" as the first one and "s" as the
+ second one. See :func:`ecdsa.util.sigdecode_string` and
+ :func:`ecdsa.util.sigdecode_der` for examples.
+ :type sigdecode: callable
+
+ :return: Initialised VerifyingKey objects
+ :rtype: list of VerifyingKey
+ """
+ generator = curve.generator
+ r, s = sigdecode(signature, generator.order())
+ sig = ecdsa.Signature(r, s)
+
+ digest = normalise_bytes(digest)
+ digest_as_number = string_to_number(digest)
+ pks = sig.recover_public_keys(digest_as_number, generator)
+
+ # Transforms the ecdsa.Public_key object into a VerifyingKey
+ verifying_keys = [
+ cls.from_public_point(pk.point, curve, hashfunc) for pk in pks
+ ]
+ return verifying_keys
+
+ def _raw_encode(self):
+ """Convert the public key to the :term:`raw encoding`."""
+ order = self.pubkey.order
+ x_str = number_to_string(self.pubkey.point.x(), order)
+ y_str = number_to_string(self.pubkey.point.y(), order)
+ return x_str + y_str
+
+ def _compressed_encode(self):
+ """Encode the public point into the compressed form."""
+ order = self.pubkey.order
+ x_str = number_to_string(self.pubkey.point.x(), order)
+ if self.pubkey.point.y() & 1:
+ return b("\x03") + x_str
+ else:
+ return b("\x02") + x_str
+
+ def _hybrid_encode(self):
+ """Encode the public point into the hybrid form."""
+ raw_enc = self._raw_encode()
+ if self.pubkey.point.y() & 1:
+ return b("\x07") + raw_enc
+ else:
+ return b("\x06") + raw_enc
+
+ def to_string(self, encoding="raw"):
+ """
+ Convert the public key to a byte string.
+
+ The method by default uses the :term:`raw encoding` (specified
+ by `encoding="raw"`). It can also output keys in :term:`uncompressed`,
+ :term:`compressed` and :term:`hybrid` formats.
+
+ Remember that the curve identification is not part of the encoding
+ so to decode the point using :func:`~VerifyingKey.from_string`, curve
+ needs to be specified.
+
+ Note: while the method is called "to_string", the name is a misnomer
+ from Python 2 days, when character strings and byte strings shared a
+ type. In Python 3 the returned type will be `bytes`.
+
+ :return: encoding of the public key (public point) on the curve, in
+ the selected format
+ :rtype: bytes
+ """
+ assert encoding in ("raw", "uncompressed", "compressed", "hybrid")
+ if encoding == "raw":
+ return self._raw_encode()
+ elif encoding == "uncompressed":
+ return b("\x04") + self._raw_encode()
+ elif encoding == "hybrid":
+ return self._hybrid_encode()
+ else:
+ return self._compressed_encode()
+
+ def to_pem(self, point_encoding="uncompressed"):
+ """
+ Convert the public key to the :term:`PEM` format.
+
+ The PEM header of the key will be ``BEGIN PUBLIC KEY``.
+
+ The format of the key is described in the
+ :func:`~VerifyingKey.from_der()` method.
+ This method supports only "named curve" encoding of keys.
+
+ :param str point_encoding: specification of the encoding format
+ of public keys. "uncompressed" is most portable, "compressed" is
+ smallest. "hybrid" is uncommon and unsupported by most
+ implementations; it is as big as "uncompressed".
+
+ :return: portable encoding of the public key
+ :rtype: bytes
+
+ .. warning:: The PEM is encoded to US-ASCII, it needs to be
+ re-encoded if the system is incompatible (e.g. uses UTF-16)
+ """
+ return der.topem(self.to_der(point_encoding), "PUBLIC KEY")
+
+ def to_der(self, point_encoding="uncompressed"):
+ """
+ Convert the public key to the :term:`DER` format.
+
+ The format of the key is described in the
+ :func:`~VerifyingKey.from_der()` method.
+ This method supports only "named curve" encoding of keys.
+
+ :param str point_encoding: specification of the encoding format
+ of public keys. "uncompressed" is most portable, "compressed" is
+ smallest. "hybrid" is uncommon and unsupported by most
+ implementations; it is as big as "uncompressed".
+
+ :return: DER encoding of the public key
+ :rtype: bytes
+ """
+ if point_encoding == "raw":
+ raise ValueError("raw point_encoding not allowed in DER")
+ point_str = self.to_string(point_encoding)
+ return der.encode_sequence(
+ der.encode_sequence(
+ encoded_oid_ecPublicKey, self.curve.encoded_oid
+ ),
+ # 0 is the number of unused bits in the
+ # bit string
+ der.encode_bitstring(point_str, 0),
+ )
+
+ def verify(
+ self,
+ signature,
+ data,
+ hashfunc=None,
+ sigdecode=sigdecode_string,
+ allow_truncate=True,
+ ):
+ """
+ Verify a signature made over provided data.
+
+ Will hash `data` to verify the signature.
+
+ By default expects signature in :term:`raw encoding`. Can also be used
+ to verify signatures in ASN.1 DER encoding by using
+ :func:`ecdsa.util.sigdecode_der`
+ as the `sigdecode` parameter.
+
+ :param signature: encoding of the signature
+ :type signature: sigdecode method dependent
+ :param data: data signed by the `signature`, will be hashed using
+ `hashfunc`, if specified, or the default hash function
+ :type data: bytes-like object
+ :param hashfunc: The default hash function that will be used for
+ verification, needs to implement the same interface as hashlib.sha1
+ :type hashfunc: callable
+ :param sigdecode: Callable to define the way the signature needs to
+ be decoded to an object, needs to handle `signature` as the
+ first parameter, the curve order (an int) as the second and return
+ a tuple with two integers, "r" as the first one and "s" as the
+ second one. See :func:`ecdsa.util.sigdecode_string` and
+ :func:`ecdsa.util.sigdecode_der` for examples.
+ :type sigdecode: callable
+ :param bool allow_truncate: if True, the provided digest can have
+ bigger bit-size than the order of the curve, the extra bits (at
+ the end of the digest) will be truncated. Use it when verifying
+ SHA-384 output using NIST256p or in similar situations. Defaults to
+ True.
+
+ :raises BadSignatureError: if the signature is invalid or malformed
+
+ :return: True if the verification was successful
+ :rtype: bool
+ """
+ # signature doesn't have to be a bytes-like-object so don't normalise
+ # it, the decoders will do that
+ data = normalise_bytes(data)
+
+ hashfunc = hashfunc or self.default_hashfunc
+ digest = hashfunc(data).digest()
+ return self.verify_digest(signature, digest, sigdecode, allow_truncate)
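+
+ # A sketch of the typical call pattern (`vk`, `signature` and
+ # `message` are hypothetical names):
+ #
+ #     try:
+ #         vk.verify(signature, message)
+ #     except BadSignatureError:
+ #         ...  # reject the message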
+
+ def verify_digest(
+ self,
+ signature,
+ digest,
+ sigdecode=sigdecode_string,
+ allow_truncate=False,
+ ):
+ """
+ Verify a signature made over provided hash value.
+
+ By default expects signature in :term:`raw encoding`. Can also be used
+ to verify signatures in ASN.1 DER encoding by using
+ :func:`ecdsa.util.sigdecode_der`
+ as the `sigdecode` parameter.
+
+ :param signature: encoding of the signature
+ :type signature: sigdecode method dependent
+ :param digest: raw hash value that the signature authenticates.
+ :type digest: bytes-like object
+ :param sigdecode: Callable to define the way the signature needs to
+ be decoded to an object, needs to handle `signature` as the
+ first parameter, the curve order (an int) as the second and return
+ a tuple with two integers, "r" as the first one and "s" as the
+ second one. See :func:`ecdsa.util.sigdecode_string` and
+ :func:`ecdsa.util.sigdecode_der` for examples.
+ :type sigdecode: callable
+ :param bool allow_truncate: if True, the provided digest can have
+ bigger bit-size than the order of the curve, the extra bits (at
+ the end of the digest) will be truncated. Use it when verifying
+ SHA-384 output using NIST256p or in similar situations.
+
+ :raises BadSignatureError: if the signature is invalid or malformed
+ :raises BadDigestError: if the provided digest is too big for the curve
+ associated with this VerifyingKey and allow_truncate was not set
+
+ :return: True if the verification was successful
+ :rtype: bool
+ """
+ # signature doesn't have to be a bytes-like-object so don't normalise
+ # it, the decoders will do that
+ digest = normalise_bytes(digest)
+ if not allow_truncate and len(digest) > self.curve.baselen:
+ raise BadDigestError(
+ "this curve (%s) is too short "
+ "for your digest (%d)" % (self.curve.name, 8 * len(digest))
+ )
+ number = string_to_number(digest)
+ if allow_truncate:
+ max_length = bit_length(self.curve.order)
+ # we don't use bit_length(number) as that truncates leading zeros
+ length = len(digest) * 8
+
+ # See NIST FIPS 186-4:
+ #
+ # When the length of the output of the hash function is greater
+ # than N (i.e., the bit length of q), then the leftmost N bits of
+ # the hash function output block shall be used in any calculation
+ # using the hash function output during the generation or
+ # verification of a digital signature.
+ #
+ # as such, we need to shift-out the low-order bits:
+ number >>= max(0, length - max_length)
+
+ try:
+ r, s = sigdecode(signature, self.pubkey.order)
+ except (der.UnexpectedDER, MalformedSignature) as e:
+ raise BadSignatureError("Malformed formatting of signature", e)
+ sig = ecdsa.Signature(r, s)
+ if self.pubkey.verifies(number, sig):
+ return True
+ raise BadSignatureError("Signature verification failed")
+
+
+class SigningKey(object):
+ """
+ Class for handling keys that can create signatures (private keys).
+
+ :ivar ecdsa.curves.Curve curve: The Curve over which all the cryptographic
+ operations will take place
+ :ivar default_hashfunc: the function that will be used for hashing the
+ data. Should implement the same API as hashlib.sha1
+ :ivar int baselen: the length of a :term:`raw encoding` of private key
+ :ivar ecdsa.keys.VerifyingKey verifying_key: the public key
+ associated with this private key
+ :ivar ecdsa.ecdsa.Private_key privkey: the actual private key
+ """
+
+ def __init__(self, _error__please_use_generate=None):
+ """Unsupported, please use one of the classmethods to initialise."""
+ if not _error__please_use_generate:
+ raise TypeError("Please use SigningKey.generate() to construct me")
+ self.curve = None
+ self.default_hashfunc = None
+ self.baselen = None
+ self.verifying_key = None
+ self.privkey = None
+
+ def __eq__(self, other):
+ """Return True if the keys are identical, False otherwise."""
+ if isinstance(other, SigningKey):
+ return (
+ self.curve == other.curve
+ and self.verifying_key == other.verifying_key
+ and self.privkey == other.privkey
+ )
+ return NotImplemented
+
+ @classmethod
+ def generate(cls, curve=NIST192p, entropy=None, hashfunc=sha1):
+ """
+ Generate a random private key.
+
+ :param curve: The curve on which the point needs to reside, defaults
+ to NIST192p
+ :type curve: ecdsa.curves.Curve
+ :param entropy: Source of randomness for generating the private keys,
+ should provide cryptographically secure random numbers if the keys
+ need to be secure. Uses os.urandom() by default.
+ :type entropy: callable
+ :param hashfunc: The default hash function that will be used for
+ signing, needs to implement the same interface
+ as hashlib.sha1
+ :type hashfunc: callable
+
+ :return: Initialised SigningKey object
+ :rtype: SigningKey
+ """
+ secexp = randrange(curve.order, entropy)
+ return cls.from_secret_exponent(secexp, curve, hashfunc)
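+
+ # A sketch of generating a key pair (NIST256p comes from ecdsa.curves,
+ # `sk` and `vk` are hypothetical names):
+ #
+ #     sk = SigningKey.generate(curve=NIST256p)
+ #     vk = sk.get_verifying_key()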
+
+ @classmethod
+ def from_secret_exponent(cls, secexp, curve=NIST192p, hashfunc=sha1):
+ """
+ Create a private key from a random integer.
+
+ Note: it's a low level method, it's recommended to use the
+ :func:`~SigningKey.generate` method to create private keys.
+
+ :param int secexp: secret multiplier (the actual private key in ECDSA).
+ Needs to be an integer between 1 and the curve order.
+ :param curve: The curve on which the point needs to reside
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ signing, needs to implement the same interface
+ as hashlib.sha1
+ :type hashfunc: callable
+
+ :raises MalformedPointError: when the provided secexp is too large
+ or too small for the curve selected
+ :raises RuntimeError: if the generation of public key from private
+ key failed
+
+ :return: Initialised SigningKey object
+ :rtype: SigningKey
+ """
+ self = cls(_error__please_use_generate=True)
+ self.curve = curve
+ self.default_hashfunc = hashfunc
+ self.baselen = curve.baselen
+ n = curve.order
+ if not 1 <= secexp < n:
+ raise MalformedPointError(
+ "Invalid value for secexp, expected integer "
+ "between 1 and {0}".format(n)
+ )
+ pubkey_point = curve.generator * secexp
+ if hasattr(pubkey_point, "scale"):
+ pubkey_point = pubkey_point.scale()
+ self.verifying_key = VerifyingKey.from_public_point(
+ pubkey_point, curve, hashfunc, False
+ )
+ pubkey = self.verifying_key.pubkey
+ self.privkey = ecdsa.Private_key(pubkey, secexp)
+ self.privkey.order = n
+ return self
+
+ @classmethod
+ def from_string(cls, string, curve=NIST192p, hashfunc=sha1):
+ """
+ Decode the private key from :term:`raw encoding`.
+
+ Note: the name of this method is a misnomer coming from days of
+ Python 2, when binary strings and character strings shared a type.
+ In Python 3, the expected type is `bytes`.
+
+ :param string: the raw encoding of the private key
+ :type string: bytes like object
+ :param curve: The curve on which the point needs to reside
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ signing, needs to implement the same interface
+ as hashlib.sha1
+ :type hashfunc: callable
+
+ :raises MalformedPointError: if the length of encoding doesn't match
+ the provided curve or the encoded values are too large
+ :raises RuntimeError: if the generation of public key from private
+ key failed
+
+ :return: Initialised SigningKey object
+ :rtype: SigningKey
+ """
+ string = normalise_bytes(string)
+ if len(string) != curve.baselen:
+ raise MalformedPointError(
+ "Invalid length of private key, received {0}, "
+ "expected {1}".format(len(string), curve.baselen)
+ )
+ secexp = string_to_number(string)
+ return cls.from_secret_exponent(secexp, curve, hashfunc)
+
+ @classmethod
+ def from_pem(cls, string, hashfunc=sha1):
+ """
+ Initialise from key stored in :term:`PEM` format.
+
+ The PEM formats supported are the un-encrypted RFC5915
+ (the ssleay format) supported by OpenSSL, and the more common
+ un-encrypted RFC5958 (the PKCS #8 format).
+
+ The legacy format files have the header with the string
+ ``BEGIN EC PRIVATE KEY``.
+ PKCS#8 files have the header ``BEGIN PRIVATE KEY``.
+ Encrypted files (ones that include the string
+ ``Proc-Type: 4,ENCRYPTED``
+ right after the PEM header) are not supported.
+
+ See :func:`~SigningKey.from_der` for the ASN.1 syntax of the objects
+ in these files.
+
+ :param string: text with PEM-encoded private ECDSA key
+ :type string: str
+
+ :raises MalformedPointError: if the length of encoding doesn't match
+ the provided curve or the encoded values are too large
+ :raises RuntimeError: if the generation of public key from private
+ key failed
+ :raises UnexpectedDER: if the encoding of the PEM file is incorrect
+
+ :return: Initialised SigningKey object
+ :rtype: SigningKey
+ """
+ if not PY2 and isinstance(string, str):
+ string = string.encode()
+
+ # The privkey pem may have multiple sections, commonly it also has
+ # "EC PARAMETERS", we need just "EC PRIVATE KEY". PKCS#8 should not
+ # have the "EC PARAMETERS" section; it's just "PRIVATE KEY".
+ private_key_index = string.find(b"-----BEGIN EC PRIVATE KEY-----")
+ if private_key_index == -1:
+ private_key_index = string.index(b"-----BEGIN PRIVATE KEY-----")
+
+ return cls.from_der(der.unpem(string[private_key_index:]), hashfunc)
+
+ @classmethod
+ def from_der(cls, string, hashfunc=sha1):
+ """
+ Initialise from key stored in :term:`DER` format.
+
+ The DER formats supported are the un-encrypted RFC5915
+ (the ssleay format) supported by OpenSSL, and the more common
+ un-encrypted RFC5958 (the PKCS #8 format).
+
+ Both formats contain an ASN.1 object following the syntax specified
+ in RFC5915::
+
+ ECPrivateKey ::= SEQUENCE {
+ version INTEGER { ecPrivkeyVer1(1) }} (ecPrivkeyVer1),
+ privateKey OCTET STRING,
+ parameters [0] ECParameters {{ NamedCurve }} OPTIONAL,
+ publicKey [1] BIT STRING OPTIONAL
+ }
+
+ The `publicKey` field is ignored completely (errors in it, if any,
+ will go undetected).
+
+ The only format supported for the `parameters` field is the named
+ curve method. Explicit encoding of curve parameters is not supported.
+ In the legacy ssleay format, this implementation requires the optional
+ `parameters` field to get the curve name. In PKCS #8 format, the curve
+ is part of the PrivateKeyAlgorithmIdentifier.
+
+ The PKCS #8 format includes an ECPrivateKey object as the `privateKey`
+ field within a larger structure::
+
+ OneAsymmetricKey ::= SEQUENCE {
+ version Version,
+ privateKeyAlgorithm PrivateKeyAlgorithmIdentifier,
+ privateKey PrivateKey,
+ attributes [0] Attributes OPTIONAL,
+ ...,
+ [[2: publicKey [1] PublicKey OPTIONAL ]],
+ ...
+ }
+
+ The `attributes` and `publicKey` fields are completely ignored; errors
+ in them will not be detected.
+
+ :param string: binary string with DER-encoded private ECDSA key
+ :type string: bytes like object
+
+ :raises MalformedPointError: if the length of encoding doesn't match
+ the provided curve or the encoded values are too large
+ :raises RuntimeError: if the generation of public key from private
+ key failed
+ :raises UnexpectedDER: if the encoding of the DER file is incorrect
+
+ :return: Initialised SigningKey object
+ :rtype: SigningKey
+ """
+ s = normalise_bytes(string)
+ curve = None
+
+ s, empty = der.remove_sequence(s)
+ if empty != b(""):
+ raise der.UnexpectedDER(
+ "trailing junk after DER privkey: %s" % binascii.hexlify(empty)
+ )
+
+ version, s = der.remove_integer(s)
+
+ # At this point, PKCS #8 has a sequence containing the algorithm
+ # identifier and the curve identifier. The ssleay format instead has
+ # an octet string containing the key data, so this is how we can
+ # distinguish the two formats.
+ if der.is_sequence(s):
+ if version not in (0, 1):
+ raise der.UnexpectedDER(
+ "expected version '0' or '1' at start of privkey, got %d"
+ % version
+ )
+
+ sequence, s = der.remove_sequence(s)
+ algorithm_oid, algorithm_identifier = der.remove_object(sequence)
+ curve_oid, empty = der.remove_object(algorithm_identifier)
+ curve = find_curve(curve_oid)
+
+ if algorithm_oid not in (oid_ecPublicKey, oid_ecDH, oid_ecMQV):
+ raise der.UnexpectedDER(
+ "unexpected algorithm identifier '%s'" % (algorithm_oid,)
+ )
+ if empty != b"":
+ raise der.UnexpectedDER(
+ "unexpected data after algorithm identifier: %s"
+ % binascii.hexlify(empty)
+ )
+
+ # Up next is an octet string containing an ECPrivateKey. Ignore
+ # the optional "attributes" and "publicKey" fields that come after.
+ s, _ = der.remove_octet_string(s)
+
+ # Unpack the ECPrivateKey to get to the key data octet string,
+ # and rejoin the ssleay parsing path.
+ s, empty = der.remove_sequence(s)
+ if empty != b(""):
+ raise der.UnexpectedDER(
+ "trailing junk after DER privkey: %s"
+ % binascii.hexlify(empty)
+ )
+
+ version, s = der.remove_integer(s)
+
+ # The version of the ECPrivateKey must be 1.
+ if version != 1:
+ raise der.UnexpectedDER(
+ "expected version '1' at start of DER privkey, got %d"
+ % version
+ )
+
+ privkey_str, s = der.remove_octet_string(s)
+
+ if not curve:
+ tag, curve_oid_str, s = der.remove_constructed(s)
+ if tag != 0:
+ raise der.UnexpectedDER(
+ "expected tag 0 in DER privkey, got %d" % tag
+ )
+ curve_oid, empty = der.remove_object(curve_oid_str)
+ if empty != b(""):
+ raise der.UnexpectedDER(
+ "trailing junk after DER privkey "
+ "curve_oid: %s" % binascii.hexlify(empty)
+ )
+ curve = find_curve(curve_oid)
+
+ # we don't actually care about the following fields
+ #
+ # tag, pubkey_bitstring, s = der.remove_constructed(s)
+ # if tag != 1:
+ # raise der.UnexpectedDER("expected tag 1 in DER privkey, got %d"
+ # % tag)
+ # pubkey_str = der.remove_bitstring(pubkey_bitstring, 0)
+ # if empty != "":
+ # raise der.UnexpectedDER("trailing junk after DER privkey "
+ # "pubkeystr: %s"
+ # % binascii.hexlify(empty))
+
+ # our from_string method likes fixed-length privkey strings
+ if len(privkey_str) < curve.baselen:
+ privkey_str = (
+ b("\x00") * (curve.baselen - len(privkey_str)) + privkey_str
+ )
+ return cls.from_string(privkey_str, curve, hashfunc)
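+
+ # Editorial sketch, not part of the upstream module: a minimal round-trip
+ # through the DER path described above, assuming the package is
+ # importable as `ecdsa` and using NIST256p purely as an example curve:
+ #
+ #     from ecdsa import SigningKey, NIST256p
+ #     sk = SigningKey.generate(curve=NIST256p)
+ #     sk2 = SigningKey.from_der(sk.to_der())                  # ssleay
+ #     sk3 = SigningKey.from_der(sk.to_der(format="pkcs8"))    # PKCS #8
+ #     assert sk.to_string() == sk2.to_string() == sk3.to_string()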
+
+ def to_string(self):
+ """
+ Convert the private key to :term:`raw encoding`.
+
+ Note: while the method is named "to_string", its name comes from
+ Python 2 days, when binary and character strings used the same type.
+ The type used in Python 3 is `bytes`.
+
+ :return: raw encoding of private key
+ :rtype: bytes
+ """
+ secexp = self.privkey.secret_multiplier
+ s = number_to_string(secexp, self.privkey.order)
+ return s
+
+ def to_pem(self, point_encoding="uncompressed", format="ssleay"):
+ """
+ Convert the private key to the :term:`PEM` format.
+
+ See :func:`~SigningKey.from_pem` method for format description.
+
+ Only the named curve format is supported.
+ The public key will be included in the generated string.
+
+ The PEM header will specify ``BEGIN EC PRIVATE KEY`` or
+ ``BEGIN PRIVATE KEY``, depending on the desired format.
+
+ :param str point_encoding: format to use for encoding public point
+ :param str format: either ``ssleay`` (default) or ``pkcs8``
+
+ :return: PEM encoded private key
+ :rtype: bytes
+
+ .. warning:: The PEM is encoded to US-ASCII; it needs to be
+ re-encoded if the system is incompatible (e.g. uses UTF-16)
+ """
+ # TODO: "BEGIN ECPARAMETERS"
+ assert format in ("ssleay", "pkcs8")
+ header = "EC PRIVATE KEY" if format == "ssleay" else "PRIVATE KEY"
+ return der.topem(self.to_der(point_encoding, format), header)
+
+ def to_der(self, point_encoding="uncompressed", format="ssleay"):
+ """
+ Convert the private key to the :term:`DER` format.
+
+ See :func:`~SigningKey.from_der` method for format specification.
+
+ Only the named curve format is supported.
+ The public key will be included in the generated string.
+
+ :param str point_encoding: format to use for encoding public point
+ :param str format: either ``ssleay`` (default) or ``pkcs8``
+
+ :return: DER encoded private key
+ :rtype: bytes
+ """
+ # SEQ([int(1), octetstring(privkey), cont[0], oid(secp224r1),
+ # cont[1], bitstring])
+ if point_encoding == "raw":
+ raise ValueError("raw encoding not allowed in DER")
+ assert format in ("ssleay", "pkcs8")
+ encoded_vk = self.get_verifying_key().to_string(point_encoding)
+ # the 0 in encode_bitstring specifies the number of unused bits
+ # in the `encoded_vk` string
+ ec_private_key = der.encode_sequence(
+ der.encode_integer(1),
+ der.encode_octet_string(self.to_string()),
+ der.encode_constructed(0, self.curve.encoded_oid),
+ der.encode_constructed(1, der.encode_bitstring(encoded_vk, 0)),
+ )
+
+ if format == "ssleay":
+ return ec_private_key
+ else:
+ return der.encode_sequence(
+ # version = 1 means the public key is not present in the
+ # top-level structure.
+ der.encode_integer(1),
+ der.encode_sequence(
+ der.encode_oid(*oid_ecPublicKey), self.curve.encoded_oid
+ ),
+ der.encode_octet_string(ec_private_key),
+ )
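+
+ # Editorial sketch, not part of the upstream module: serialising one key
+ # in both supported containers; the PEM headers differ as described in
+ # to_pem() above:
+ #
+ #     from ecdsa import SigningKey, NIST256p
+ #     sk = SigningKey.generate(curve=NIST256p)
+ #     pem_ssleay = sk.to_pem()                # "BEGIN EC PRIVATE KEY"
+ #     pem_pkcs8 = sk.to_pem(format="pkcs8")   # "BEGIN PRIVATE KEY"
+ #     assert SigningKey.from_pem(pem_pkcs8).to_string() == sk.to_string()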
+
+ def get_verifying_key(self):
+ """
+ Return the VerifyingKey associated with this private key.
+
+ Equivalent to reading the `verifying_key` field of an instance.
+
+ :return: a public key that can be used to verify the signatures made
+ with this SigningKey
+ :rtype: VerifyingKey
+ """
+ return self.verifying_key
+
+ def sign_deterministic(
+ self,
+ data,
+ hashfunc=None,
+ sigencode=sigencode_string,
+ extra_entropy=b"",
+ ):
+ """
+ Create a signature over data using the deterministic RFC6979 algorithm.
+
+ The data will be hashed using the `hashfunc` function before signing.
+
+ This is the recommended method for performing signatures when hashing
+ of data is necessary.
+
+ :param data: data to be hashed and computed signature over
+ :type data: bytes like object
+ :param hashfunc: hash function to use for computing the signature;
+ if unspecified, the default hash function selected during
+ object initialisation will be used (see
+ `VerifyingKey.default_hashfunc`). The object needs to implement
+ the same interface as hashlib.sha1.
+ :type hashfunc: callable
+ :param sigencode: function used to encode the signature.
+ The function needs to accept three parameters: the two integers
+ that are the signature and the order of the curve over which the
+ signature was computed. It needs to return an encoded signature.
+ See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der`
+ as examples of such functions.
+ :type sigencode: callable
+ :param extra_entropy: additional data that will be fed into the random
+ number generator used in the RFC6979 process. Entirely optional.
+ :type extra_entropy: bytes like object
+
+ :return: encoded signature over `data`
+ :rtype: bytes or sigencode function dependent type
+ """
+ hashfunc = hashfunc or self.default_hashfunc
+ data = normalise_bytes(data)
+ extra_entropy = normalise_bytes(extra_entropy)
+ digest = hashfunc(data).digest()
+
+ return self.sign_digest_deterministic(
+ digest,
+ hashfunc=hashfunc,
+ sigencode=sigencode,
+ extra_entropy=extra_entropy,
+ allow_truncate=True,
+ )
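+
+ # Editorial sketch, not part of the upstream module: deterministic
+ # signing and verification; hashlib.sha256, NIST256p and b"message" are
+ # arbitrary example choices:
+ #
+ #     import hashlib
+ #     from ecdsa import SigningKey, NIST256p
+ #     sk = SigningKey.generate(curve=NIST256p)
+ #     sig = sk.sign_deterministic(b"message", hashfunc=hashlib.sha256)
+ #     vk = sk.get_verifying_key()
+ #     assert vk.verify(sig, b"message", hashfunc=hashlib.sha256)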
+
+ def sign_digest_deterministic(
+ self,
+ digest,
+ hashfunc=None,
+ sigencode=sigencode_string,
+ extra_entropy=b"",
+ allow_truncate=False,
+ ):
+ """
+ Create a signature for a digest using the deterministic RFC6979 algorithm.
+
+ `digest` should be the output of a cryptographically secure hash
+ function, like SHA-256 or SHA3-256.
+
+ This is the recommended method for performing signatures when no
+ hashing of data is necessary.
+
+ :param digest: hash of data that will be signed
+ :type digest: bytes like object
+ :param hashfunc: hash function to use for computing the random "k"
+ value in the RFC6979 process;
+ if unspecified, the default hash function selected during
+ object initialisation will be used (see
+ `VerifyingKey.default_hashfunc`). The object needs to implement
+ the same interface as hashlib.sha1.
+ :type hashfunc: callable
+ :param sigencode: function used to encode the signature.
+ The function needs to accept three parameters: the two integers
+ that are the signature and the order of the curve over which the
+ signature was computed. It needs to return an encoded signature.
+ See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der`
+ as examples of such functions.
+ :type sigencode: callable
+ :param extra_entropy: additional data that will be fed into the random
+ number generator used in the RFC6979 process. Entirely optional.
+ :type extra_entropy: bytes like object
+ :param bool allow_truncate: if True, the provided digest can have a
+ bigger bit-size than the order of the curve; the extra bits (at
+ the end of the digest) will then be truncated. Use it when signing
+ SHA-384 output using NIST256p or in similar situations.
+
+ :return: encoded signature for the `digest` hash
+ :rtype: bytes or sigencode function dependent type
+ """
+ secexp = self.privkey.secret_multiplier
+ hashfunc = hashfunc or self.default_hashfunc
+ digest = normalise_bytes(digest)
+ extra_entropy = normalise_bytes(extra_entropy)
+
+ def simple_r_s(r, s, order):
+ return r, s, order
+
+ retry_gen = 0
+ while True:
+ k = rfc6979.generate_k(
+ self.curve.generator.order(),
+ secexp,
+ hashfunc,
+ digest,
+ retry_gen=retry_gen,
+ extra_entropy=extra_entropy,
+ )
+ try:
+ r, s, order = self.sign_digest(
+ digest,
+ sigencode=simple_r_s,
+ k=k,
+ allow_truncate=allow_truncate,
+ )
+ break
+ except RSZeroError:
+ retry_gen += 1
+
+ return sigencode(r, s, order)
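+
+ # Editorial sketch, not part of the upstream module: signing a
+ # precomputed digest and checking it with VerifyingKey.verify_digest();
+ # SHA-256 with NIST256p is an arbitrary, size-matched pairing:
+ #
+ #     import hashlib
+ #     from ecdsa import SigningKey, NIST256p
+ #     sk = SigningKey.generate(curve=NIST256p)
+ #     digest = hashlib.sha256(b"message").digest()
+ #     sig = sk.sign_digest_deterministic(digest, hashfunc=hashlib.sha256)
+ #     assert sk.get_verifying_key().verify_digest(sig, digest)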
+
+ def sign(
+ self,
+ data,
+ entropy=None,
+ hashfunc=None,
+ sigencode=sigencode_string,
+ k=None,
+ allow_truncate=True,
+ ):
+ """
+ Create signature over data using the probabilistic ECDSA algorithm.
+
+ This method uses the standard ECDSA algorithm that requires a
+ cryptographically secure random number generator.
+
+ It's recommended to use the :func:`~SigningKey.sign_deterministic`
+ method instead of this one.
+
+ :param data: data that will be hashed for signing
+ :type data: bytes like object
+ :param callable entropy: randomness source, os.urandom by default
+ :param hashfunc: hash function to use for hashing the provided `data`.
+ If unspecified, the default hash function selected during
+ object initialisation will be used (see
+ `VerifyingKey.default_hashfunc`).
+ Should behave like hashlib.sha1. The output length of the
+ hash (in bytes) must not be longer than the length of the curve
+ order (rounded up to the nearest byte), so using SHA256 with
+ NIST256p is ok, but SHA256 with NIST192p is not. (In the 2**-96ish
+ unlikely event of a hash output larger than the curve order, the
+ hash will effectively be wrapped mod n).
+ Use hashfunc=hashlib.sha1 to match openssl's -ecdsa-with-SHA1 mode,
+ or hashfunc=hashlib.sha256 for openssl-1.0.0's -ecdsa-with-SHA256.
+ :type hashfunc: callable
+ :param sigencode: function used to encode the signature.
+ The function needs to accept three parameters: the two integers
+ that are the signature and the order of the curve over which the
+ signature was computed. It needs to return an encoded signature.
+ See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der`
+ as examples of such functions.
+ :type sigencode: callable
+ :param int k: a pre-selected nonce for calculating the signature.
+ In typical use cases, it should be set to None (the default) to
+ allow its generation from an entropy source.
+ :param bool allow_truncate: if True, the provided digest can have a
+ bigger bit-size than the order of the curve; the extra bits (at
+ the end of the digest) will then be truncated. Use it when signing
+ SHA-384 output using NIST256p or in similar situations. True by
+ default.
+
+ :raises RSZeroError: in the unlikely event that the "r" or "s"
+ parameter is equal to 0, as that would leak the key. The caller
+ should try a better entropy source or a different 'k' in such a case.
+
+ :return: encoded signature of the hash of `data`
+ :rtype: bytes or sigencode function dependent type
+ """
+ hashfunc = hashfunc or self.default_hashfunc
+ data = normalise_bytes(data)
+ h = hashfunc(data).digest()
+ return self.sign_digest(h, entropy, sigencode, k, allow_truncate)
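+
+ # Editorial sketch, not part of the upstream module: the probabilistic
+ # path with a DER-encoded signature; sigencode_der/sigdecode_der come
+ # from ecdsa.util:
+ #
+ #     from ecdsa import SigningKey, NIST256p
+ #     from ecdsa.util import sigencode_der, sigdecode_der
+ #     sk = SigningKey.generate(curve=NIST256p)
+ #     sig = sk.sign(b"message", sigencode=sigencode_der)
+ #     vk = sk.get_verifying_key()
+ #     assert vk.verify(sig, b"message", sigdecode=sigdecode_der)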
+
+ def sign_digest(
+ self,
+ digest,
+ entropy=None,
+ sigencode=sigencode_string,
+ k=None,
+ allow_truncate=False,
+ ):
+ """
+ Create signature over digest using the probabilistic ECDSA algorithm.
+
+ This method uses the standard ECDSA algorithm that requires a
+ cryptographically secure random number generator.
+
+ This method does not hash the input.
+
+ It's recommended to use the
+ :func:`~SigningKey.sign_digest_deterministic` method
+ instead of this one.
+
+ :param digest: hash value that will be signed
+ :type digest: bytes like object
+ :param callable entropy: randomness source, os.urandom by default
+ :param sigencode: function used to encode the signature.
+ The function needs to accept three parameters: the two integers
+ that are the signature and the order of the curve over which the
+ signature was computed. It needs to return an encoded signature.
+ See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der`
+ as examples of such functions.
+ :type sigencode: callable
+ :param int k: a pre-selected nonce for calculating the signature.
+ In typical use cases, it should be set to None (the default) to
+ allow its generation from an entropy source.
+ :param bool allow_truncate: if True, the provided digest can have a
+ bigger bit-size than the order of the curve; the extra bits (at
+ the end of the digest) will then be truncated. Use it when signing
+ SHA-384 output using NIST256p or in similar situations.
+
+ :raises RSZeroError: in the unlikely event that the "r" or "s"
+ parameter is equal to 0, as that would leak the key. The caller
+ should try a better entropy source in such a case.
+
+ :return: encoded signature for the `digest` hash
+ :rtype: bytes or sigencode function dependent type
+ """
+ digest = normalise_bytes(digest)
+ if allow_truncate:
+ digest = digest[: self.curve.baselen]
+ if len(digest) > self.curve.baselen:
+ raise BadDigestError(
+ "this curve (%s) is too short "
+ "for your digest (%d)" % (self.curve.name, 8 * len(digest))
+ )
+ number = string_to_number(digest)
+ r, s = self.sign_number(number, entropy, k)
+ return sigencode(r, s, self.privkey.order)
+
+ def sign_number(self, number, entropy=None, k=None):
+ """
+ Sign an integer directly.
+
+ Note, this is a low level method, usually you will want to use
+ :func:`~SigningKey.sign_deterministic` or
+ :func:`~SigningKey.sign_digest_deterministic`.
+
+ :param int number: number to sign using the probabilistic ECDSA
+ algorithm.
+ :param callable entropy: entropy source, os.urandom by default
+ :param int k: pre-selected nonce for signature operation. If unset
+ it will be selected at random using the entropy source.
+
+ :raises RSZeroError: in the unlikely event that the "r" or "s"
+ parameter is equal to 0, as that would leak the key. The caller
+ should try a different 'k' in such a case.
+
+ :return: the "r" and "s" parameters of the signature
+ :rtype: tuple of ints
+ """
+ order = self.privkey.order
+
+ if k is not None:
+ _k = k
+ else:
+ _k = randrange(order, entropy)
+
+ assert 1 <= _k < order
+ sig = self.privkey.sign(number, _k)
+ return sig.r, sig.s
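+
+ # Editorial sketch, not part of the upstream module: the low-level
+ # interface signs a bare integer and returns raw (r, s); the check below
+ # leans on the internal privkey/Public_key objects used above, so treat
+ # it as illustrative only:
+ #
+ #     from ecdsa import SigningKey, NIST256p
+ #     from ecdsa.ecdsa import Signature
+ #     sk = SigningKey.generate(curve=NIST256p)
+ #     r, s = sk.sign_number(0x1234)
+ #     assert sk.privkey.public_key.verifies(0x1234, Signature(r, s))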
diff --git a/frozen_deps/ecdsa/numbertheory.py b/frozen_deps/ecdsa/numbertheory.py
new file mode 100644
index 0000000..e5cc888
--- /dev/null
+++ b/frozen_deps/ecdsa/numbertheory.py
@@ -0,0 +1,810 @@
+#! /usr/bin/env python
+#
+# Provide some simple capabilities from number theory.
+#
+# Version of 2008.11.14.
+#
+# Written in 2005 and 2006 by Peter Pearson and placed in the public domain.
+# Revision history:
+# 2008.11.14: Use pow(base, exponent, modulus) for modular_exp.
+# Make gcd and lcm accept arbitrarily many arguments.
+
+from __future__ import division
+
+from six import integer_types, PY2
+from six.moves import reduce
+
+try:
+ xrange
+except NameError:
+ xrange = range
+try:
+ from gmpy2 import powmod
+
+ GMPY2 = True
+ GMPY = False
+except ImportError:
+ GMPY2 = False
+ try:
+ from gmpy import mpz
+
+ GMPY = True
+ except ImportError:
+ GMPY = False
+
+import math
+import warnings
+
+
+class Error(Exception):
+ """Base class for exceptions in this module."""
+
+ pass
+
+
+class SquareRootError(Error):
+ pass
+
+
+class NegativeExponentError(Error):
+ pass
+
+
+def modular_exp(base, exponent, modulus): # pragma: no cover
+ """Raise base to exponent, reducing by modulus"""
+ # deprecated in 0.14
+ warnings.warn(
+ "Function is unused in library code. If you use this code, "
+ "change to pow() builtin.",
+ DeprecationWarning,
+ )
+ if exponent < 0:
+ raise NegativeExponentError(
+ "Negative exponents (%d) not allowed" % exponent
+ )
+ return pow(base, exponent, modulus)
+
+
+def polynomial_reduce_mod(poly, polymod, p):
+ """Reduce poly by polymod, integer arithmetic modulo p.
+
+ Polynomials are represented as lists of coefficients
+ of increasing powers of x."""
+
+ # This module has been tested only by extensive use
+ # in calculating modular square roots.
+
+ # Just to make this easy, require a monic polynomial:
+ assert polymod[-1] == 1
+
+ assert len(polymod) > 1
+
+ while len(poly) >= len(polymod):
+ if poly[-1] != 0:
+ for i in xrange(2, len(polymod) + 1):
+ poly[-i] = (poly[-i] - poly[-1] * polymod[-i]) % p
+ poly = poly[0:-1]
+
+ return poly
+
+
+def polynomial_multiply_mod(m1, m2, polymod, p):
+ """Polynomial multiplication modulo a polynomial over ints mod p.
+
+ Polynomials are represented as lists of coefficients
+ of increasing powers of x."""
+
+ # This is just a seat-of-the-pants implementation.
+
+ # This module has been tested only by extensive use
+ # in calculating modular square roots.
+
+ # Initialize the product to zero:
+
+ prod = (len(m1) + len(m2) - 1) * [0]
+
+ # Add together all the cross-terms:
+
+ for i in xrange(len(m1)):
+ for j in xrange(len(m2)):
+ prod[i + j] = (prod[i + j] + m1[i] * m2[j]) % p
+
+ return polynomial_reduce_mod(prod, polymod, p)
+
+
+def polynomial_exp_mod(base, exponent, polymod, p):
+ """Polynomial exponentiation modulo a polynomial over ints mod p.
+
+ Polynomials are represented as lists of coefficients
+ of increasing powers of x."""
+
+ # Based on the Handbook of Applied Cryptography, algorithm 2.227.
+
+ # This module has been tested only by extensive use
+ # in calculating modular square roots.
+
+ assert exponent < p
+
+ if exponent == 0:
+ return [1]
+
+ G = base
+ k = exponent
+ if k % 2 == 1:
+ s = G
+ else:
+ s = [1]
+
+ while k > 1:
+ k = k // 2
+ G = polynomial_multiply_mod(G, G, polymod, p)
+ if k % 2 == 1:
+ s = polynomial_multiply_mod(G, s, polymod, p)
+
+ return s
+
+
+def jacobi(a, n):
+ """Jacobi symbol"""
+
+ # Based on the Handbook of Applied Cryptography (HAC), algorithm 2.149.
+
+ # This function has been tested by comparison with a small
+ # table printed in HAC, and by extensive use in calculating
+ # modular square roots.
+
+ assert n >= 3
+ assert n % 2 == 1
+ a = a % n
+ if a == 0:
+ return 0
+ if a == 1:
+ return 1
+ a1, e = a, 0
+ while a1 % 2 == 0:
+ a1, e = a1 // 2, e + 1
+ if e % 2 == 0 or n % 8 == 1 or n % 8 == 7:
+ s = 1
+ else:
+ s = -1
+ if a1 == 1:
+ return s
+ if n % 4 == 3 and a1 % 4 == 3:
+ s = -s
+ return s * jacobi(n % a1, a1)
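+
+# Editorial note, not part of the upstream module: for an odd prime p the
+# Jacobi symbol coincides with the Legendre symbol, so jacobi(a, p) is 1 for
+# quadratic residues, -1 for non-residues and 0 when p divides a:
+#
+#     assert jacobi(4, 7) == 1     # 2*2 == 4 (mod 7)
+#     assert jacobi(3, 7) == -1    # 3 is not a square mod 7
+#     assert jacobi(21, 7) == 0    # 21 is divisible by 7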
+
+
+def square_root_mod_prime(a, p):
+ """Modular square root of a, mod p, p prime."""
+
+ # Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39.
+
+ # This module has been tested for all values in [0,p-1] for
+ # every prime p from 3 to 1229.
+
+ assert 0 <= a < p
+ assert 1 < p
+
+ if a == 0:
+ return 0
+ if p == 2:
+ return a
+
+ jac = jacobi(a, p)
+ if jac == -1:
+ raise SquareRootError("%d has no square root modulo %d" % (a, p))
+
+ if p % 4 == 3:
+ return pow(a, (p + 1) // 4, p)
+
+ if p % 8 == 5:
+ d = pow(a, (p - 1) // 4, p)
+ if d == 1:
+ return pow(a, (p + 3) // 8, p)
+ if d == p - 1:
+ return (2 * a * pow(4 * a, (p - 5) // 8, p)) % p
+ raise RuntimeError("Shouldn't get here.")
+
+ if PY2:
+ # xrange on python2 can take integers representable as C long only
+ range_top = min(0x7FFFFFFF, p)
+ else:
+ range_top = p
+ for b in xrange(2, range_top):
+ if jacobi(b * b - 4 * a, p) == -1:
+ f = (a, -b, 1)
+ ff = polynomial_exp_mod((0, 1), (p + 1) // 2, f, p)
+ assert ff[1] == 0
+ return ff[0]
+ raise RuntimeError("No b found.")
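+
+# Editorial note, not part of the upstream module: one example value per
+# major branch of the algorithm above:
+#
+#     assert square_root_mod_prime(4, 7) in (2, 5)     # p % 4 == 3 branch
+#     assert square_root_mod_prime(4, 13) in (2, 11)   # p % 8 == 5 branch
+#     # square_root_mod_prime(3, 7) raises SquareRootError (non-residue)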
+
+
+if GMPY2:
+
+ def inverse_mod(a, m):
+ """Inverse of a mod m."""
+ if a == 0:
+ return 0
+ return powmod(a, -1, m)
+
+
+elif GMPY:
+
+ def inverse_mod(a, m):
+ """Inverse of a mod m."""
+ # while libgmp likely does support inverses modulo, it is accessible
+ # only using the native `pow()` function, and `pow()` sanity checks
+ # the parameters before passing them on to underlying implementation
+ # on Python2
+ if a == 0:
+ return 0
+ a = mpz(a)
+ m = mpz(m)
+
+ lm, hm = mpz(1), mpz(0)
+ low, high = a % m, m
+ while low > 1:
+ r = high // low
+ lm, low, hm, high = hm - lm * r, high - low * r, lm, low
+
+ return lm % m
+
+
+else:
+
+ def inverse_mod(a, m):
+ """Inverse of a mod m."""
+
+ if a == 0:
+ return 0
+
+ lm, hm = 1, 0
+ low, high = a % m, m
+ while low > 1:
+ r = high // low
+ lm, low, hm, high = hm - lm * r, high - low * r, lm, low
+
+ return lm % m
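+
+# Editorial note, not part of the upstream module: whichever backend was
+# selected above, the contract is the same -- for gcd(a, m) == 1 the result
+# x satisfies (a * x) % m == 1, and a == 0 maps to 0:
+#
+#     assert (3 * inverse_mod(3, 7)) % 7 == 1
+#     assert inverse_mod(0, 7) == 0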
+
+
+try:
+ gcd2 = math.gcd
+except AttributeError:
+
+ def gcd2(a, b):
+ """Greatest common divisor using Euclid's algorithm."""
+ while a:
+ a, b = b % a, a
+ return b
+
+
+def gcd(*a):
+ """Greatest common divisor.
+
+ Usage: gcd([ 2, 4, 6 ])
+ or: gcd(2, 4, 6)
+ """
+
+ if len(a) > 1:
+ return reduce(gcd2, a)
+ if hasattr(a[0], "__iter__"):
+ return reduce(gcd2, a[0])
+ return a[0]
+
+
+def lcm2(a, b):
+ """Least common multiple of two integers."""
+
+ return (a * b) // gcd(a, b)
+
+
+def lcm(*a):
+ """Least common multiple.
+
+ Usage: lcm([ 3, 4, 5 ])
+ or: lcm(3, 4, 5)
+ """
+
+ if len(a) > 1:
+ return reduce(lcm2, a)
+ if hasattr(a[0], "__iter__"):
+ return reduce(lcm2, a[0])
+ return a[0]
+
+
+def factorization(n):
+ """Decompose n into a list of (prime,exponent) pairs."""
+
+ assert isinstance(n, integer_types)
+
+ if n < 2:
+ return []
+
+ result = []
+ d = 2
+
+ # Test the small primes:
+
+ for d in smallprimes:
+ if d > n:
+ break
+ q, r = divmod(n, d)
+ if r == 0:
+ count = 1
+ while d <= n:
+ n = q
+ q, r = divmod(n, d)
+ if r != 0:
+ break
+ count = count + 1
+ result.append((d, count))
+
+ # If n is still greater than the last of our small primes,
+ # it may require further work:
+
+ if n > smallprimes[-1]:
+ if is_prime(n): # If what's left is prime, it's easy:
+ result.append((n, 1))
+ else: # Ugh. Search stupidly for a divisor:
+ d = smallprimes[-1]
+ while 1:
+ d = d + 2 # Try the next divisor.
+ q, r = divmod(n, d)
+ if q < d: # n < d*d means we're done, n = 1 or prime.
+ break
+ if r == 0: # d divides n. How many times?
+ count = 1
+ n = q
+ while d <= n: # As long as d might still divide n,
+ q, r = divmod(n, d) # see if it does.
+ if r != 0:
+ break
+ n = q # It does. Reduce n, increase count.
+ count = count + 1
+ result.append((d, count))
+ if n > 1:
+ result.append((n, 1))
+
+ return result
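+
+# Editorial note, not part of the upstream module: the returned
+# (prime, exponent) pairs multiply back to the argument, e.g.:
+#
+#     assert factorization(360) == [(2, 3), (3, 2), (5, 1)]
+#     assert factorization(97) == [(97, 1)]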
+
+
+def phi(n): # pragma: no cover
+ """Return the Euler totient function of n."""
+ # deprecated in 0.14
+ warnings.warn(
+ "Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning,
+ )
+
+ assert isinstance(n, integer_types)
+
+ if n < 3:
+ return 1
+
+ result = 1
+ ff = factorization(n)
+ for f in ff:
+ e = f[1]
+ if e > 1:
+ result = result * f[0] ** (e - 1) * (f[0] - 1)
+ else:
+ result = result * (f[0] - 1)
+ return result
+
+
+def carmichael(n): # pragma: no cover
+ """Return Carmichael function of n.
+
+ Carmichael(n) is the smallest integer x such that
+ m**x = 1 mod n for all m relatively prime to n.
+ """
+ # deprecated in 0.14
+ warnings.warn(
+ "Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning,
+ )
+
+ return carmichael_of_factorized(factorization(n))
+
+
+def carmichael_of_factorized(f_list): # pragma: no cover
+ """Return the Carmichael function of a number that is
+ represented as a list of (prime,exponent) pairs.
+ """
+ # deprecated in 0.14
+ warnings.warn(
+ "Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning,
+ )
+
+ if len(f_list) < 1:
+ return 1
+
+ result = carmichael_of_ppower(f_list[0])
+ for i in xrange(1, len(f_list)):
+ result = lcm(result, carmichael_of_ppower(f_list[i]))
+
+ return result
+
+
+def carmichael_of_ppower(pp): # pragma: no cover
+ """Carmichael function of the given power of the given prime."""
+ # deprecated in 0.14
+ warnings.warn(
+ "Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning,
+ )
+
+ p, a = pp
+ if p == 2 and a > 2:
+ return 2 ** (a - 2)
+ else:
+ return (p - 1) * p ** (a - 1)
+
+
+def order_mod(x, m): # pragma: no cover
+ """Return the order of x in the multiplicative group mod m."""
+ # deprecated in 0.14
+ warnings.warn(
+ "Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning,
+ )
+
+ # Warning: this implementation is not very clever, and will
+ # take a long time if m is very large.
+
+ if m <= 1:
+ return 0
+
+ assert gcd(x, m) == 1
+
+ z = x
+ result = 1
+ while z != 1:
+ z = (z * x) % m
+ result = result + 1
+ return result
+
+
+def largest_factor_relatively_prime(a, b): # pragma: no cover
+ """Return the largest factor of a relatively prime to b."""
+ # deprecated in 0.14
+ warnings.warn(
+ "Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning,
+ )
+
+ while 1:
+ d = gcd(a, b)
+ if d <= 1:
+ break
+ b = d
+ while 1:
+ q, r = divmod(a, d)
+ if r > 0:
+ break
+ a = q
+ return a
+
+
+def kinda_order_mod(x, m): # pragma: no cover
+ """Return the order of x in the multiplicative group mod m',
+ where m' is the largest factor of m relatively prime to x.
+ """
+ # deprecated in 0.14
+ warnings.warn(
+ "Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning,
+ )
+
+ return order_mod(x, largest_factor_relatively_prime(m, x))
+
+
+def is_prime(n):
+ """Return True if x is prime, False otherwise.
+
+ We use the Miller-Rabin test, as given in Menezes et al. p. 138.
+ This test is not exact: there are composite values n for which
+ it returns True.
+
+ In testing the odd numbers from 10000001 to 19999999,
+ about 66 composites got past the first test,
+ 5 got past the second test, and none got past the third.
+ Since factors of 2, 3, 5, 7, and 11 were detected during
+ preliminary screening, the number of numbers tested by
+ Miller-Rabin was (19999999 - 10000001)*(2/3)*(4/5)*(6/7)
+ = 4.57 million.
+ """
+
+ # (This is used to study the risk of false positives:)
+ global miller_rabin_test_count
+
+ miller_rabin_test_count = 0
+
+ if n <= smallprimes[-1]:
+ if n in smallprimes:
+ return True
+ else:
+ return False
+
+ if gcd(n, 2 * 3 * 5 * 7 * 11) != 1:
+ return False
+
+ # Choose a number of iterations sufficient to reduce the
+ # probability of accepting a composite below 2**-80
+ # (from Menezes et al. Table 4.4):
+
+ t = 40
+ n_bits = 1 + int(math.log(n, 2))
+ for k, tt in (
+ (100, 27),
+ (150, 18),
+ (200, 15),
+ (250, 12),
+ (300, 9),
+ (350, 8),
+ (400, 7),
+ (450, 6),
+ (550, 5),
+ (650, 4),
+ (850, 3),
+ (1300, 2),
+ ):
+ if n_bits < k:
+ break
+ t = tt
+
+ # Run the test t times:
+
+ s = 0
+ r = n - 1
+ while (r % 2) == 0:
+ s = s + 1
+ r = r // 2
+ for i in xrange(t):
+ a = smallprimes[i]
+ y = pow(a, r, n)
+ if y != 1 and y != n - 1:
+ j = 1
+ while j <= s - 1 and y != n - 1:
+ y = pow(y, 2, n)
+ if y == 1:
+ miller_rabin_test_count = i + 1
+ return False
+ j = j + 1
+ if y != n - 1:
+ miller_rabin_test_count = i + 1
+ return False
+ return True
+
+
+def next_prime(starting_value):
+ """Return the smallest prime larger than the starting value."""
+
+ if starting_value < 2:
+ return 2
+ result = (starting_value + 1) | 1
+ while not is_prime(result):
+ result = result + 2
+ return result
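+
+# Editorial note, not part of the upstream module: a couple of spot checks;
+# is_prime() is probabilistic (Miller-Rabin), but with the prime bases used
+# above it is exact for numbers this small:
+#
+#     assert is_prime(1229) and not is_prime(1227)
+#     assert next_prime(1229) == 1231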
+
+
+smallprimes = [
+ 2,
+ 3,
+ 5,
+ 7,
+ 11,
+ 13,
+ 17,
+ 19,
+ 23,
+ 29,
+ 31,
+ 37,
+ 41,
+ 43,
+ 47,
+ 53,
+ 59,
+ 61,
+ 67,
+ 71,
+ 73,
+ 79,
+ 83,
+ 89,
+ 97,
+ 101,
+ 103,
+ 107,
+ 109,
+ 113,
+ 127,
+ 131,
+ 137,
+ 139,
+ 149,
+ 151,
+ 157,
+ 163,
+ 167,
+ 173,
+ 179,
+ 181,
+ 191,
+ 193,
+ 197,
+ 199,
+ 211,
+ 223,
+ 227,
+ 229,
+ 233,
+ 239,
+ 241,
+ 251,
+ 257,
+ 263,
+ 269,
+ 271,
+ 277,
+ 281,
+ 283,
+ 293,
+ 307,
+ 311,
+ 313,
+ 317,
+ 331,
+ 337,
+ 347,
+ 349,
+ 353,
+ 359,
+ 367,
+ 373,
+ 379,
+ 383,
+ 389,
+ 397,
+ 401,
+ 409,
+ 419,
+ 421,
+ 431,
+ 433,
+ 439,
+ 443,
+ 449,
+ 457,
+ 461,
+ 463,
+ 467,
+ 479,
+ 487,
+ 491,
+ 499,
+ 503,
+ 509,
+ 521,
+ 523,
+ 541,
+ 547,
+ 557,
+ 563,
+ 569,
+ 571,
+ 577,
+ 587,
+ 593,
+ 599,
+ 601,
+ 607,
+ 613,
+ 617,
+ 619,
+ 631,
+ 641,
+ 643,
+ 647,
+ 653,
+ 659,
+ 661,
+ 673,
+ 677,
+ 683,
+ 691,
+ 701,
+ 709,
+ 719,
+ 727,
+ 733,
+ 739,
+ 743,
+ 751,
+ 757,
+ 761,
+ 769,
+ 773,
+ 787,
+ 797,
+ 809,
+ 811,
+ 821,
+ 823,
+ 827,
+ 829,
+ 839,
+ 853,
+ 857,
+ 859,
+ 863,
+ 877,
+ 881,
+ 883,
+ 887,
+ 907,
+ 911,
+ 919,
+ 929,
+ 937,
+ 941,
+ 947,
+ 953,
+ 967,
+ 971,
+ 977,
+ 983,
+ 991,
+ 997,
+ 1009,
+ 1013,
+ 1019,
+ 1021,
+ 1031,
+ 1033,
+ 1039,
+ 1049,
+ 1051,
+ 1061,
+ 1063,
+ 1069,
+ 1087,
+ 1091,
+ 1093,
+ 1097,
+ 1103,
+ 1109,
+ 1117,
+ 1123,
+ 1129,
+ 1151,
+ 1153,
+ 1163,
+ 1171,
+ 1181,
+ 1187,
+ 1193,
+ 1201,
+ 1213,
+ 1217,
+ 1223,
+ 1229,
+]
+
+miller_rabin_test_count = 0
diff --git a/frozen_deps/ecdsa/rfc6979.py b/frozen_deps/ecdsa/rfc6979.py
new file mode 100644
index 0000000..1e577c0
--- /dev/null
+++ b/frozen_deps/ecdsa/rfc6979.py
@@ -0,0 +1,110 @@
+"""
+RFC 6979:
+ Deterministic Usage of the Digital Signature Algorithm (DSA) and
+ Elliptic Curve Digital Signature Algorithm (ECDSA)
+
+ http://tools.ietf.org/html/rfc6979
+
+Many thanks to Coda Hale for his implementation in Go language:
+ https://github.com/codahale/rfc6979
+"""
+
+import hmac
+from binascii import hexlify
+from .util import number_to_string, number_to_string_crop, bit_length
+from ._compat import hmac_compat
+
+
+# bit_length was defined in this module previously, so keep it for backwards
+# compatibility; it will need to be deprecated and removed later
+__all__ = ["bit_length", "bits2int", "bits2octets", "generate_k"]
+
+
+def bits2int(data, qlen):
+ x = int(hexlify(data), 16)
+ l = len(data) * 8
+
+ if l > qlen:
+ return x >> (l - qlen)
+ return x
+
+
+def bits2octets(data, order):
+ z1 = bits2int(data, bit_length(order))
+ z2 = z1 - order
+
+ if z2 < 0:
+ z2 = z1
+
+ return number_to_string_crop(z2, order)
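+
+# Editorial note, not part of the upstream module: bits2int keeps only the
+# qlen leftmost bits of the input, as described in RFC 6979, section 2.3.2:
+#
+#     assert bits2int(b"\xff\xff", 16) == 0xffff
+#     assert bits2int(b"\xff\xff", 8) == 0xff
+#     assert bits2int(b"\xff\xff", 24) == 0xffff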
+
+
+# https://tools.ietf.org/html/rfc6979#section-3.2
+def generate_k(order, secexp, hash_func, data, retry_gen=0, extra_entropy=b""):
+ """
+ order - order of the DSA generator used in the signature
+ secexp - secure exponent (private key) in numeric form
+ hash_func - reference to the same hash function used for generating
+ hash
+ data - hash in binary form of the signing data
+ retry_gen - int - how many good 'k' values to skip before returning
+ extra_entropy - extra added data in binary form as per section-3.6 of
+ rfc6979
+ """
+
+ qlen = bit_length(order)
+ holen = hash_func().digest_size
+ rolen = (qlen + 7) // 8
+ bx = (
+ hmac_compat(number_to_string(secexp, order)),
+ hmac_compat(bits2octets(data, order)),
+ hmac_compat(extra_entropy),
+ )
+
+ # Step B
+ v = b"\x01" * holen
+
+ # Step C
+ k = b"\x00" * holen
+
+ # Step D
+
+ k = hmac.new(k, digestmod=hash_func)
+ k.update(v + b"\x00")
+ for i in bx:
+ k.update(i)
+ k = k.digest()
+
+ # Step E
+ v = hmac.new(k, v, hash_func).digest()
+
+ # Step F
+ k = hmac.new(k, digestmod=hash_func)
+ k.update(v + b"\x01")
+ for i in bx:
+ k.update(i)
+ k = k.digest()
+
+ # Step G
+ v = hmac.new(k, v, hash_func).digest()
+
+ # Step H
+ while True:
+ # Step H1
+ t = b""
+
+ # Step H2
+ while len(t) < rolen:
+ v = hmac.new(k, v, hash_func).digest()
+ t += v
+
+ # Step H3
+ secret = bits2int(t, qlen)
+
+ if 1 <= secret < order:
+ if retry_gen <= 0:
+ return secret
+ retry_gen -= 1
+
+ k = hmac.new(k, v + b"\x00", hash_func).digest()
+ v = hmac.new(k, v, hash_func).digest()
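+
+# Editorial note, not part of the upstream module: a hedged sketch of
+# driving generate_k() directly; the secret exponent and message are
+# arbitrary example values (the library normally calls this from
+# SigningKey.sign_digest_deterministic):
+#
+#     import hashlib
+#     from ecdsa.curves import NIST256p
+#     order = NIST256p.generator.order()
+#     digest = hashlib.sha256(b"message").digest()
+#     k = generate_k(order, 12345, hashlib.sha256, digest)
+#     assert 1 <= k < order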
diff --git a/frozen_deps/ecdsa/test_der.py b/frozen_deps/ecdsa/test_der.py
new file mode 100644
index 0000000..746d927
--- /dev/null
+++ b/frozen_deps/ecdsa/test_der.py
@@ -0,0 +1,397 @@
+# compatibility with Python 2.6; for that we need the unittest2 package,
+# which is not available on 3.3 or 3.4
+import warnings
+from binascii import hexlify
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+from six import b
+import hypothesis.strategies as st
+from hypothesis import given, example
+import pytest
+from ._compat import str_idx_as_int
+from .curves import NIST256p, NIST224p
+from .der import (
+ remove_integer,
+ UnexpectedDER,
+ read_length,
+ encode_bitstring,
+ remove_bitstring,
+ remove_object,
+ encode_oid,
+)
+
+
+class TestRemoveInteger(unittest.TestCase):
+ # DER requires the integers to be 0-padded only if they would otherwise be
+ # interpreted as negative; check that those errors are detected
+ def test_non_minimal_encoding(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_integer(b("\x02\x02\x00\x01"))
+
+ def test_negative_with_high_bit_set(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_integer(b("\x02\x01\x80"))
+
+ def test_minimal_with_high_bit_set(self):
+ val, rem = remove_integer(b("\x02\x02\x00\x80"))
+
+ self.assertEqual(val, 0x80)
+ self.assertFalse(rem)
+
+ def test_two_zero_bytes_with_high_bit_set(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_integer(b("\x02\x03\x00\x00\xff"))
+
+ def test_zero_length_integer(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_integer(b("\x02\x00"))
+
+ def test_empty_string(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_integer(b(""))
+
+ def test_encoding_of_zero(self):
+ val, rem = remove_integer(b("\x02\x01\x00"))
+
+ self.assertEqual(val, 0)
+ self.assertFalse(rem)
+
+ def test_encoding_of_127(self):
+ val, rem = remove_integer(b("\x02\x01\x7f"))
+
+ self.assertEqual(val, 127)
+ self.assertFalse(rem)
+
+ def test_encoding_of_128(self):
+ val, rem = remove_integer(b("\x02\x02\x00\x80"))
+
+ self.assertEqual(val, 128)
+ self.assertFalse(rem)
+
+
+class TestReadLength(unittest.TestCase):
+ # DER requires lengths between 0 and 127 to be encoded using the short
+ # form, and lengths above that to be encoded with the minimal number of
+ # bytes necessary
+ def test_zero_length(self):
+ self.assertEqual((0, 1), read_length(b("\x00")))
+
+ def test_two_byte_zero_length(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b("\x81\x00"))
+
+ def test_two_byte_small_length(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b("\x81\x7f"))
+
+ def test_long_form_with_zero_length(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b("\x80"))
+
+ def test_smallest_two_byte_length(self):
+ self.assertEqual((128, 2), read_length(b("\x81\x80")))
+
+ def test_zero_padded_length(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b("\x82\x00\x80"))
+
+ def test_two_three_byte_length(self):
+ self.assertEqual((256, 3), read_length(b"\x82\x01\x00"))
+
+ def test_empty_string(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b(""))
+
+ def test_length_overflow(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b("\x83\x01\x00"))
+
+
+class TestEncodeBitstring(unittest.TestCase):
+ # DER requires BIT STRINGS to include the number of padding bits in the
+ # encoded byte string; that padding must be between 0 and 7 bits
+
+ def test_old_call_convention(self):
+ """This is the old way to use the function."""
+ warnings.simplefilter("always")
+ with pytest.warns(DeprecationWarning) as warns:
+ der = encode_bitstring(b"\x00\xff")
+
+ self.assertEqual(len(warns), 1)
+ self.assertIn(
+ "unused= needs to be specified", warns[0].message.args[0]
+ )
+
+ self.assertEqual(der, b"\x03\x02\x00\xff")
+
+ def test_new_call_convention(self):
+ """This is how it should be called now."""
+ warnings.simplefilter("always")
+ with pytest.warns(None) as warns:
+ der = encode_bitstring(b"\xff", 0)
+
+ # verify that new call convention doesn't raise Warnings
+ self.assertEqual(len(warns), 0)
+
+ self.assertEqual(der, b"\x03\x02\x00\xff")
+
+ def test_implicit_unused_bits(self):
+ """
+ Writing a bit string that already includes the number of unused bits.
+ """
+ warnings.simplefilter("always")
+ with pytest.warns(None) as warns:
+ der = encode_bitstring(b"\x00\xff", None)
+
+ # verify that new call convention doesn't raise Warnings
+ self.assertEqual(len(warns), 0)
+
+ self.assertEqual(der, b"\x03\x02\x00\xff")
+
+ def test_explicit_unused_bits(self):
+ der = encode_bitstring(b"\xff\xf0", 4)
+
+ self.assertEqual(der, b"\x03\x03\x04\xff\xf0")
+
+ def test_empty_string(self):
+ self.assertEqual(encode_bitstring(b"", 0), b"\x03\x01\x00")
+
+ def test_invalid_unused_count(self):
+ with self.assertRaises(ValueError):
+ encode_bitstring(b"\xff\x00", 8)
+
+ def test_invalid_unused_with_empty_string(self):
+ with self.assertRaises(ValueError):
+ encode_bitstring(b"", 1)
+
+ def test_non_zero_padding_bits(self):
+ with self.assertRaises(ValueError):
+ encode_bitstring(b"\xff", 2)
+
+
+class TestRemoveBitstring(unittest.TestCase):
+ def test_old_call_convention(self):
+ """This is the old way to call the function."""
+ warnings.simplefilter("always")
+ with pytest.warns(DeprecationWarning) as warns:
+ bits, rest = remove_bitstring(b"\x03\x02\x00\xff")
+
+ self.assertEqual(len(warns), 1)
+ self.assertIn(
+ "expect_unused= needs to be specified", warns[0].message.args[0]
+ )
+
+ self.assertEqual(bits, b"\x00\xff")
+ self.assertEqual(rest, b"")
+
+ def test_new_call_convention(self):
+ warnings.simplefilter("always")
+ with pytest.warns(None) as warns:
+ bits, rest = remove_bitstring(b"\x03\x02\x00\xff", 0)
+
+ self.assertEqual(len(warns), 0)
+
+ self.assertEqual(bits, b"\xff")
+ self.assertEqual(rest, b"")
+
+ def test_implicit_unexpected_unused(self):
+ warnings.simplefilter("always")
+ with pytest.warns(None) as warns:
+ bits, rest = remove_bitstring(b"\x03\x02\x00\xff", None)
+
+ self.assertEqual(len(warns), 0)
+
+ self.assertEqual(bits, (b"\xff", 0))
+ self.assertEqual(rest, b"")
+
+ def test_with_padding(self):
+ ret, rest = remove_bitstring(b"\x03\x02\x04\xf0", None)
+
+ self.assertEqual(ret, (b"\xf0", 4))
+ self.assertEqual(rest, b"")
+
+ def test_not_a_bitstring(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b"\x02\x02\x00\xff", None)
+
+ def test_empty_encoding(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b"\x03\x00", None)
+
+ def test_empty_string(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b"", None)
+
+ def test_no_length(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b"\x03", None)
+
+ def test_unexpected_number_of_unused_bits(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b"\x03\x02\x00\xff", 1)
+
+ def test_invalid_encoding_of_unused_bits(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b"\x03\x03\x08\xff\x00", None)
+
+ def test_invalid_encoding_of_empty_string(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b"\x03\x01\x01", None)
+
+ def test_invalid_padding_bits(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b"\x03\x02\x01\xff", None)
+
+
+class TestStrIdxAsInt(unittest.TestCase):
+ def test_str(self):
+ self.assertEqual(115, str_idx_as_int("str", 0))
+
+ def test_bytes(self):
+ self.assertEqual(115, str_idx_as_int(b"str", 0))
+
+ def test_bytearray(self):
+ self.assertEqual(115, str_idx_as_int(bytearray(b"str"), 0))
+
+
+class TestEncodeOid(unittest.TestCase):
+ def test_pub_key_oid(self):
+ oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1)
+ self.assertEqual(hexlify(oid_ecPublicKey), b("06072a8648ce3d0201"))
+
+ def test_nist224p_oid(self):
+ self.assertEqual(hexlify(NIST224p.encoded_oid), b("06052b81040021"))
+
+ def test_nist256p_oid(self):
+ self.assertEqual(
+ hexlify(NIST256p.encoded_oid), b"06082a8648ce3d030107"
+ )
+
+ def test_large_second_subid(self):
+ # from X.690, section 8.19.5
+ oid = encode_oid(2, 999, 3)
+ self.assertEqual(oid, b"\x06\x03\x88\x37\x03")
+
+ def test_with_two_subids(self):
+ oid = encode_oid(2, 999)
+ self.assertEqual(oid, b"\x06\x02\x88\x37")
+
+ def test_zero_zero(self):
+ oid = encode_oid(0, 0)
+ self.assertEqual(oid, b"\x06\x01\x00")
+
+ def test_with_wrong_types(self):
+ with self.assertRaises((TypeError, AssertionError)):
+ encode_oid(0, None)
+
+ def test_with_small_first_large_second(self):
+ with self.assertRaises(AssertionError):
+ encode_oid(1, 40)
+
+ def test_small_first_max_second(self):
+ oid = encode_oid(1, 39)
+ self.assertEqual(oid, b"\x06\x01\x4f")
+
+ def test_with_invalid_first(self):
+ with self.assertRaises(AssertionError):
+ encode_oid(3, 39)
+
+
+class TestRemoveObject(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1)
+
+ def test_pub_key_oid(self):
+ oid, rest = remove_object(self.oid_ecPublicKey)
+ self.assertEqual(rest, b"")
+ self.assertEqual(oid, (1, 2, 840, 10045, 2, 1))
+
+ def test_with_extra_bytes(self):
+ oid, rest = remove_object(self.oid_ecPublicKey + b"more")
+ self.assertEqual(rest, b"more")
+ self.assertEqual(oid, (1, 2, 840, 10045, 2, 1))
+
+ def test_with_large_second_subid(self):
+ # from X.690, section 8.19.5
+ oid, rest = remove_object(b"\x06\x03\x88\x37\x03")
+ self.assertEqual(rest, b"")
+ self.assertEqual(oid, (2, 999, 3))
+
+ def test_with_padded_first_subid(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b"\x06\x02\x80\x00")
+
+ def test_with_padded_second_subid(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b"\x06\x04\x88\x37\x80\x01")
+
+ def test_with_missing_last_byte_of_multi_byte(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b"\x06\x03\x88\x37\x83")
+
+ def test_with_two_subids(self):
+ oid, rest = remove_object(b"\x06\x02\x88\x37")
+ self.assertEqual(rest, b"")
+ self.assertEqual(oid, (2, 999))
+
+ def test_zero_zero(self):
+ oid, rest = remove_object(b"\x06\x01\x00")
+ self.assertEqual(rest, b"")
+ self.assertEqual(oid, (0, 0))
+
+ def test_empty_string(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b"")
+
+ def test_missing_length(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b"\x06")
+
+ def test_empty_oid(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b"\x06\x00")
+
+ def test_empty_oid_overflow(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b"\x06\x01")
+
+ def test_with_wrong_type(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b"\x04\x02\x88\x37")
+
+ def test_with_too_long_length(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b"\x06\x03\x88\x37")
+
+
[email protected]
+def st_oid(draw, max_value=2 ** 512, max_size=50):
+ """
+ Hypothesis strategy that returns valid OBJECT IDENTIFIERs as tuples
+
+ :param max_value: maximum value of any single sub-identifier
+ :param max_size: maximum length of the generated OID
+ """
+ first = draw(st.integers(min_value=0, max_value=2))
+ if first < 2:
+ second = draw(st.integers(min_value=0, max_value=39))
+ else:
+ second = draw(st.integers(min_value=0, max_value=max_value))
+ rest = draw(
+ st.lists(
+ st.integers(min_value=0, max_value=max_value), max_size=max_size
+ )
+ )
+ return (first, second) + tuple(rest)
+
+
+@given(st_oid())
+def test_oids(ids):
+ encoded_oid = encode_oid(*ids)
+ decoded_oid, rest = remove_object(encoded_oid)
+ assert rest == b""
+ assert decoded_oid == ids
diff --git a/frozen_deps/ecdsa/test_ecdh.py b/frozen_deps/ecdsa/test_ecdh.py
new file mode 100644
index 0000000..caf6835
--- /dev/null
+++ b/frozen_deps/ecdsa/test_ecdh.py
@@ -0,0 +1,370 @@
+import os
+import shutil
+import subprocess
+import pytest
+from binascii import hexlify, unhexlify
+
+from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p
+from .curves import curves
+from .ecdh import ECDH, InvalidCurveError, InvalidSharedSecretError, NoKeyError
+from .keys import SigningKey, VerifyingKey
+
+
+ "vcurve", curves, ids=[curve.name for curve in curves]
+)
+def test_ecdh_each(vcurve):
+ ecdh1 = ECDH(curve=vcurve)
+ ecdh2 = ECDH(curve=vcurve)
+
+ ecdh2.generate_private_key()
+ ecdh1.load_received_public_key(ecdh2.get_public_key())
+ ecdh2.load_received_public_key(ecdh1.generate_private_key())
+
+ secret1 = ecdh1.generate_sharedsecret_bytes()
+ secret2 = ecdh2.generate_sharedsecret_bytes()
+ assert secret1 == secret2
+
+
+def test_ecdh_no_public_key():
+ ecdh1 = ECDH(curve=NIST192p)
+
+ with pytest.raises(NoKeyError):
+ ecdh1.generate_sharedsecret_bytes()
+
+ ecdh1.generate_private_key()
+
+ with pytest.raises(NoKeyError):
+ ecdh1.generate_sharedsecret_bytes()
+
+
+def test_ecdh_wrong_public_key_curve():
+ ecdh1 = ECDH(curve=NIST192p)
+ ecdh1.generate_private_key()
+ ecdh2 = ECDH(curve=NIST256p)
+ ecdh2.generate_private_key()
+
+ with pytest.raises(InvalidCurveError):
+ ecdh1.load_received_public_key(ecdh2.get_public_key())
+
+ with pytest.raises(InvalidCurveError):
+ ecdh2.load_received_public_key(ecdh1.get_public_key())
+
+ ecdh1.public_key = ecdh2.get_public_key()
+ ecdh2.public_key = ecdh1.get_public_key()
+
+ with pytest.raises(InvalidCurveError):
+ ecdh1.generate_sharedsecret_bytes()
+
+ with pytest.raises(InvalidCurveError):
+ ecdh2.generate_sharedsecret_bytes()
+
+
+def test_ecdh_invalid_shared_secret_curve():
+ ecdh1 = ECDH(curve=NIST256p)
+ ecdh1.generate_private_key()
+
+ ecdh1.load_received_public_key(
+ SigningKey.generate(NIST256p).get_verifying_key()
+ )
+
+ ecdh1.private_key.privkey.secret_multiplier = ecdh1.private_key.curve.order
+
+ with pytest.raises(InvalidSharedSecretError):
+ ecdh1.generate_sharedsecret_bytes()
+
+
+# https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp192r1.txt
+# https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp256r1.txt
+# https://github.com/coruus/nist-testvectors/blob/master/csrc.nist.gov/groups/STM/cavp/documents/components/ecccdhtestvectors/KAS_ECC_CDH_PrimitiveTest.txt
+ "curve,privatekey,pubkey,secret",
+ [
+ pytest.param(
+ NIST192p,
+ "f17d3fea367b74d340851ca4270dcb24c271f445bed9d527",
+ "42ea6dd9969dd2a61fea1aac7f8e98edcc896c6e55857cc0"
+ "dfbe5d7c61fac88b11811bde328e8a0d12bf01a9d204b523",
+ "803d8ab2e5b6e6fca715737c3a82f7ce3c783124f6d51cd0",
+ id="NIST192p-1",
+ ),
+ pytest.param(
+ NIST192p,
+ "56e853349d96fe4c442448dacb7cf92bb7a95dcf574a9bd5",
+ "deb5712fa027ac8d2f22c455ccb73a91e17b6512b5e030e7"
+ "7e2690a02cc9b28708431a29fb54b87b1f0c14e011ac2125",
+ "c208847568b98835d7312cef1f97f7aa298283152313c29d",
+ id="NIST192p-2",
+ ),
+ pytest.param(
+ NIST192p,
+ "c6ef61fe12e80bf56f2d3f7d0bb757394519906d55500949",
+ "4edaa8efc5a0f40f843663ec5815e7762dddc008e663c20f"
+ "0a9f8dc67a3e60ef6d64b522185d03df1fc0adfd42478279",
+ "87229107047a3b611920d6e3b2c0c89bea4f49412260b8dd",
+ id="NIST192p-3",
+ ),
+ pytest.param(
+ NIST192p,
+ "e6747b9c23ba7044f38ff7e62c35e4038920f5a0163d3cda",
+ "8887c276edeed3e9e866b46d58d895c73fbd80b63e382e88"
+ "04c5097ba6645e16206cfb70f7052655947dd44a17f1f9d5",
+ "eec0bed8fc55e1feddc82158fd6dc0d48a4d796aaf47d46c",
+ id="NIST192p-4",
+ ),
+ pytest.param(
+ NIST192p,
+ "beabedd0154a1afcfc85d52181c10f5eb47adc51f655047d",
+ "0d045f30254adc1fcefa8a5b1f31bf4e739dd327cd18d594"
+ "542c314e41427c08278a08ce8d7305f3b5b849c72d8aff73",
+ "716e743b1b37a2cd8479f0a3d5a74c10ba2599be18d7e2f4",
+ id="NIST192p-5",
+ ),
+ pytest.param(
+ NIST192p,
+ "cf70354226667321d6e2baf40999e2fd74c7a0f793fa8699",
+ "fb35ca20d2e96665c51b98e8f6eb3d79113508d8bccd4516"
+ "368eec0d5bfb847721df6aaff0e5d48c444f74bf9cd8a5a7",
+ "f67053b934459985a315cb017bf0302891798d45d0e19508",
+ id="NIST192p-6",
+ ),
+ pytest.param(
+ NIST224p,
+ "8346a60fc6f293ca5a0d2af68ba71d1dd389e5e40837942df3e43cbd",
+ "af33cd0629bc7e996320a3f40368f74de8704fa37b8fab69abaae280"
+ "882092ccbba7930f419a8a4f9bb16978bbc3838729992559a6f2e2d7",
+ "7d96f9a3bd3c05cf5cc37feb8b9d5209d5c2597464dec3e9983743e8",
+ id="NIST224p",
+ ),
+ pytest.param(
+ NIST256p,
+ "7d7dc5f71eb29ddaf80d6214632eeae03d9058af1fb6d22ed80badb62bc1a534",
+ "700c48f77f56584c5cc632ca65640db91b6bacce3a4df6b42ce7cc838833d287"
+ "db71e509e3fd9b060ddb20ba5c51dcc5948d46fbf640dfe0441782cab85fa4ac",
+ "46fc62106420ff012e54a434fbdd2d25ccc5852060561e68040dd7778997bd7b",
+ id="NIST256p-1",
+ ),
+ pytest.param(
+ NIST256p,
+ "38f65d6dce47676044d58ce5139582d568f64bb16098d179dbab07741dd5caf5",
+ "809f04289c64348c01515eb03d5ce7ac1a8cb9498f5caa50197e58d43a86a7ae"
+ "b29d84e811197f25eba8f5194092cb6ff440e26d4421011372461f579271cda3",
+ "057d636096cb80b67a8c038c890e887d1adfa4195e9b3ce241c8a778c59cda67",
+ id="NIST256p-2",
+ ),
+ pytest.param(
+ NIST256p,
+ "1accfaf1b97712b85a6f54b148985a1bdc4c9bec0bd258cad4b3d603f49f32c8",
+ "a2339c12d4a03c33546de533268b4ad667debf458b464d77443636440ee7fec3"
+ "ef48a3ab26e20220bcda2c1851076839dae88eae962869a497bf73cb66faf536",
+ "2d457b78b4614132477618a5b077965ec90730a8c81a1c75d6d4ec68005d67ec",
+ id="NIST256p-3",
+ ),
+ pytest.param(
+ NIST256p,
+ "207c43a79bfee03db6f4b944f53d2fb76cc49ef1c9c4d34d51b6c65c4db6932d",
+ "df3989b9fa55495719b3cf46dccd28b5153f7808191dd518eff0c3cff2b705ed"
+ "422294ff46003429d739a33206c8752552c8ba54a270defc06e221e0feaf6ac4",
+ "96441259534b80f6aee3d287a6bb17b5094dd4277d9e294f8fe73e48bf2a0024",
+ id="NIST256p-4",
+ ),
+ pytest.param(
+ NIST256p,
+ "59137e38152350b195c9718d39673d519838055ad908dd4757152fd8255c09bf",
+ "41192d2813e79561e6a1d6f53c8bc1a433a199c835e141b05a74a97b0faeb922"
+ "1af98cc45e98a7e041b01cf35f462b7562281351c8ebf3ffa02e33a0722a1328",
+ "19d44c8d63e8e8dd12c22a87b8cd4ece27acdde04dbf47f7f27537a6999a8e62",
+ id="NIST256p-5",
+ ),
+ pytest.param(
+ NIST256p,
+ "f5f8e0174610a661277979b58ce5c90fee6c9b3bb346a90a7196255e40b132ef",
+ "33e82092a0f1fb38f5649d5867fba28b503172b7035574bf8e5b7100a3052792"
+ "f2cf6b601e0a05945e335550bf648d782f46186c772c0f20d3cd0d6b8ca14b2f",
+ "664e45d5bba4ac931cd65d52017e4be9b19a515f669bea4703542a2c525cd3d3",
+ id="NIST256p-6",
+ ),
+ pytest.param(
+ NIST384p,
+ "3cc3122a68f0d95027ad38c067916ba0eb8c38894d22e1b1"
+ "5618b6818a661774ad463b205da88cf699ab4d43c9cf98a1",
+ "a7c76b970c3b5fe8b05d2838ae04ab47697b9eaf52e76459"
+ "2efda27fe7513272734466b400091adbf2d68c58e0c50066"
+ "ac68f19f2e1cb879aed43a9969b91a0839c4c38a49749b66"
+ "1efedf243451915ed0905a32b060992b468c64766fc8437a",
+ "5f9d29dc5e31a163060356213669c8ce132e22f57c9a04f4"
+ "0ba7fcead493b457e5621e766c40a2e3d4d6a04b25e533f1",
+ id="NIST384p",
+ ),
+ pytest.param(
+ NIST521p,
+ "017eecc07ab4b329068fba65e56a1f8890aa935e57134ae0ffcce802735151f4ea"
+ "c6564f6ee9974c5e6887a1fefee5743ae2241bfeb95d5ce31ddcb6f9edb4d6fc47",
+ "00685a48e86c79f0f0875f7bc18d25eb5fc8c0b07e5da4f4370f3a949034085433"
+ "4b1e1b87fa395464c60626124a4e70d0f785601d37c09870ebf176666877a2046d"
+ "01ba52c56fc8776d9e8f5db4f0cc27636d0b741bbe05400697942e80b739884a83"
+ "bde99e0f6716939e632bc8986fa18dccd443a348b6c3e522497955a4f3c302f676",
+ "005fc70477c3e63bc3954bd0df3ea0d1f41ee21746ed95fc5e1fdf90930d5e1366"
+ "72d72cc770742d1711c3c3a4c334a0ad9759436a4d3c5bf6e74b9578fac148c831",
+ id="NIST521p",
+ ),
+ ],
+)
+def test_ecdh_NIST(curve, privatekey, pubkey, secret):
+ ecdh = ECDH(curve=curve)
+ ecdh.load_private_key_bytes(unhexlify(privatekey))
+ ecdh.load_received_public_key_bytes(unhexlify(pubkey))
+
+ sharedsecret = ecdh.generate_sharedsecret_bytes()
+
+ assert sharedsecret == unhexlify(secret)
+
+
+pem_local_private_key = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n"
+ "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n"
+ "bA==\n"
+ "-----END EC PRIVATE KEY-----\n"
+)
+der_local_private_key = (
+ "305f02010104185ec8420bd6ef9252a942e989043ca29f561fa525770eb1c5a00a06082a864"
+ "8ce3d030101a13403320004b88177d084ef17f5e45639408028360f9f59b4a4d7264e62da06"
+ "51dce47a35a4c5b45cf51593423a8b557b9c2099f36c"
+)
+pem_remote_public_key = (
+ "-----BEGIN PUBLIC KEY-----\n"
+ "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n"
+ "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n"
+ "-----END PUBLIC KEY-----\n"
+)
+der_remote_public_key = (
+ "3049301306072a8648ce3d020106082a8648ce3d03010103320004b88177d084ef17f5e4563"
+ "9408028360f9f59b4a4d7264e62da0651dce47a35a4c5b45cf51593423a8b557b9c2099f36c"
+)
+gshared_secret = "8f457e34982478d1c34b9cd2d0c15911b72dd60d869e2cea"
+
+
+def test_ecdh_pem():
+ ecdh = ECDH()
+ ecdh.load_private_key_pem(pem_local_private_key)
+ ecdh.load_received_public_key_pem(pem_remote_public_key)
+
+ sharedsecret = ecdh.generate_sharedsecret_bytes()
+
+ assert sharedsecret == unhexlify(gshared_secret)
+
+
+def test_ecdh_der():
+ ecdh = ECDH()
+ ecdh.load_private_key_der(unhexlify(der_local_private_key))
+ ecdh.load_received_public_key_der(unhexlify(der_remote_public_key))
+
+ sharedsecret = ecdh.generate_sharedsecret_bytes()
+
+ assert sharedsecret == unhexlify(gshared_secret)
+
+
+# Exception class used by run_openssl.
+class RunOpenSslError(Exception):
+ pass
+
+
+def run_openssl(cmd):
+ OPENSSL = "openssl"
+ p = subprocess.Popen(
+ [OPENSSL] + cmd.split(),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ )
+ stdout, ignored = p.communicate()
+ if p.returncode != 0:
+ raise RunOpenSslError(
+ "cmd '%s %s' failed: rc=%s, stdout/err was %s"
+ % (OPENSSL, cmd, p.returncode, stdout)
+ )
+ return stdout.decode()
+
+
+OPENSSL_SUPPORTED_CURVES = set(
+ c.split(":")[0].strip()
+ for c in run_openssl("ecparam -list_curves").split("\n")
+)
+
+
+ "vcurve", curves, ids=[curve.name for curve in curves]
+)
+def test_ecdh_with_openssl(vcurve):
+ assert vcurve.openssl_name
+
+ if vcurve.openssl_name not in OPENSSL_SUPPORTED_CURVES:
+ pytest.skip("system openssl does not support " + vcurve.openssl_name)
+ return
+
+ try:
+ hlp = run_openssl("pkeyutl -help")
+ if hlp.find("-derive") == -1:
+ pytest.skip("system openssl does not support `pkeyutl -derive`")
+ return
+ except RunOpenSslError:
+ pytest.skip("system openssl does not support `pkeyutl -derive`")
+ return
+
+ if os.path.isdir("t"):
+ shutil.rmtree("t")
+ os.mkdir("t")
+ run_openssl(
+ "ecparam -name %s -genkey -out t/privkey1.pem" % vcurve.openssl_name
+ )
+ run_openssl(
+ "ecparam -name %s -genkey -out t/privkey2.pem" % vcurve.openssl_name
+ )
+ run_openssl("ec -in t/privkey1.pem -pubout -out t/pubkey1.pem")
+
+ ecdh1 = ECDH(curve=vcurve)
+ ecdh2 = ECDH(curve=vcurve)
+ with open("t/privkey1.pem") as e:
+ key = e.read()
+ ecdh1.load_private_key_pem(key)
+ with open("t/privkey2.pem") as e:
+ key = e.read()
+ ecdh2.load_private_key_pem(key)
+
+ with open("t/pubkey1.pem") as e:
+ key = e.read()
+ vk1 = VerifyingKey.from_pem(key)
+ assert vk1.to_string() == ecdh1.get_public_key().to_string()
+ vk2 = ecdh2.get_public_key()
+ with open("t/pubkey2.pem", "wb") as e:
+ e.write(vk2.to_pem())
+
+ ecdh1.load_received_public_key(vk2)
+ ecdh2.load_received_public_key(vk1)
+ secret1 = ecdh1.generate_sharedsecret_bytes()
+ secret2 = ecdh2.generate_sharedsecret_bytes()
+
+ assert secret1 == secret2
+
+ try:
+ run_openssl(
+ "pkeyutl -derive -inkey t/privkey1.pem -peerkey t/pubkey2.pem -out t/secret1"
+ )
+ run_openssl(
+ "pkeyutl -derive -inkey t/privkey2.pem -peerkey t/pubkey1.pem -out t/secret2"
+ )
+ except RunOpenSslError:
+ pytest.skip("system openssl does not support `pkeyutl -derive`")
+ return
+
+ with open("t/secret1", "rb") as e:
+ ssl_secret1 = e.read()
+ with open("t/secret1", "rb") as e:
+ ssl_secret2 = e.read()
+
+ if len(ssl_secret1) != vk1.curve.baselen:
+ pytest.skip("system openssl does not support `pkeyutl -derive`")
+ return
+
+ assert ssl_secret1 == ssl_secret2
+ assert secret1 == ssl_secret1
diff --git a/frozen_deps/ecdsa/test_ecdsa.py b/frozen_deps/ecdsa/test_ecdsa.py
new file mode 100644
index 0000000..e656b88
--- /dev/null
+++ b/frozen_deps/ecdsa/test_ecdsa.py
@@ -0,0 +1,538 @@
+from __future__ import print_function
+import sys
+import hypothesis.strategies as st
+from hypothesis import given, settings, note, example
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+import pytest
+from .ecdsa import (
+ Private_key,
+ Public_key,
+ Signature,
+ generator_192,
+ digest_integer,
+ ellipticcurve,
+ point_is_valid,
+ generator_224,
+ generator_256,
+ generator_384,
+ generator_521,
+ generator_secp256k1,
+)
+
+
+HYP_SETTINGS = {}
+# old hypothesis doesn't have the "deadline" setting
+if sys.version_info > (2, 7): # pragma: no branch
+ # SEC521p is slow, allow long execution for it
+ HYP_SETTINGS["deadline"] = 5000
+
+
+class TestP192FromX9_62(unittest.TestCase):
+ """Check test vectors from X9.62"""
+
+ @classmethod
+ def setUpClass(cls):
+ cls.d = 651056770906015076056810763456358567190100156695615665659
+ cls.Q = cls.d * generator_192
+ cls.k = 6140507067065001063065065565667405560006161556565665656654
+ cls.R = cls.k * generator_192
+
+ cls.msg = 968236873715988614170569073515315707566766479517
+ cls.pubk = Public_key(generator_192, generator_192 * cls.d)
+ cls.privk = Private_key(cls.pubk, cls.d)
+ cls.sig = cls.privk.sign(cls.msg, cls.k)
+
+ def test_point_multiplication(self):
+ assert self.Q.x() == 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5
+
+ def test_point_multiplication_2(self):
+ assert self.R.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD
+ assert self.R.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835
+
+ def test_mult_and_addition(self):
+ u1 = 2563697409189434185194736134579731015366492496392189760599
+ u2 = 6266643813348617967186477710235785849136406323338782220568
+ temp = u1 * generator_192 + u2 * self.Q
+ assert temp.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD
+ assert temp.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835
+
+ def test_signature(self):
+ r, s = self.sig.r, self.sig.s
+ assert r == 3342403536405981729393488334694600415596881826869351677613
+ assert s == 5735822328888155254683894997897571951568553642892029982342
+
+ def test_verification(self):
+ assert self.pubk.verifies(self.msg, self.sig)
+
+ def test_rejection(self):
+ assert not self.pubk.verifies(self.msg - 1, self.sig)
+
+
+class TestPublicKey(unittest.TestCase):
+ def test_equality_public_keys(self):
+ gen = generator_192
+ x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6
+ y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F
+ point = ellipticcurve.Point(gen.curve(), x, y)
+ pub_key1 = Public_key(gen, point)
+ pub_key2 = Public_key(gen, point)
+ self.assertEqual(pub_key1, pub_key2)
+
+ def test_inequality_public_key(self):
+ gen = generator_192
+ x1 = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6
+ y1 = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F
+ point1 = ellipticcurve.Point(gen.curve(), x1, y1)
+
+ x2 = 0x6A223D00BD22C52833409A163E057E5B5DA1DEF2A197DD15
+ y2 = 0x7B482604199367F1F303F9EF627F922F97023E90EAE08ABF
+ point2 = ellipticcurve.Point(gen.curve(), x2, y2)
+
+ pub_key1 = Public_key(gen, point1)
+ pub_key2 = Public_key(gen, point2)
+ self.assertNotEqual(pub_key1, pub_key2)
+
+ def test_inequality_public_key_not_implemented(self):
+ gen = generator_192
+ x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6
+ y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F
+ point = ellipticcurve.Point(gen.curve(), x, y)
+ pub_key = Public_key(gen, point)
+ self.assertNotEqual(pub_key, None)
+
+
+class TestPrivateKey(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ gen = generator_192
+ x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6
+ y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F
+ point = ellipticcurve.Point(gen.curve(), x, y)
+ cls.pub_key = Public_key(gen, point)
+
+ def test_equality_private_keys(self):
+ pr_key1 = Private_key(self.pub_key, 100)
+ pr_key2 = Private_key(self.pub_key, 100)
+ self.assertEqual(pr_key1, pr_key2)
+
+ def test_inequality_private_keys(self):
+ pr_key1 = Private_key(self.pub_key, 100)
+ pr_key2 = Private_key(self.pub_key, 200)
+ self.assertNotEqual(pr_key1, pr_key2)
+
+ def test_inequality_private_keys_not_implemented(self):
+ pr_key = Private_key(self.pub_key, 100)
+ self.assertNotEqual(pr_key, None)
+
+
+# Testing point validity, as per ECDSAVS.pdf B.2.2:
+P192_POINTS = [
+ (
+ generator_192,
+ 0xCD6D0F029A023E9AACA429615B8F577ABEE685D8257CC83A,
+ 0x00019C410987680E9FB6C0B6ECC01D9A2647C8BAE27721BACDFC,
+ False,
+ ),
+ (
+ generator_192,
+ 0x00017F2FCE203639E9EAF9FB50B81FC32776B30E3B02AF16C73B,
+ 0x95DA95C5E72DD48E229D4748D4EEE658A9A54111B23B2ADB,
+ False,
+ ),
+ (
+ generator_192,
+ 0x4F77F8BC7FCCBADD5760F4938746D5F253EE2168C1CF2792,
+ 0x000147156FF824D131629739817EDB197717C41AAB5C2A70F0F6,
+ False,
+ ),
+ (
+ generator_192,
+ 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6,
+ 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F,
+ True,
+ ),
+ (
+ generator_192,
+ 0xCDF56C1AA3D8AFC53C521ADF3FFB96734A6A630A4A5B5A70,
+ 0x97C1C44A5FB229007B5EC5D25F7413D170068FFD023CAA4E,
+ True,
+ ),
+ (
+ generator_192,
+ 0x89009C0DC361C81E99280C8E91DF578DF88CDF4B0CDEDCED,
+ 0x27BE44A529B7513E727251F128B34262A0FD4D8EC82377B9,
+ True,
+ ),
+ (
+ generator_192,
+ 0x6A223D00BD22C52833409A163E057E5B5DA1DEF2A197DD15,
+ 0x7B482604199367F1F303F9EF627F922F97023E90EAE08ABF,
+ True,
+ ),
+ (
+ generator_192,
+ 0x6DCCBDE75C0948C98DAB32EA0BC59FE125CF0FB1A3798EDA,
+ 0x0001171A3E0FA60CF3096F4E116B556198DE430E1FBD330C8835,
+ False,
+ ),
+ (
+ generator_192,
+ 0xD266B39E1F491FC4ACBBBC7D098430931CFA66D55015AF12,
+ 0x193782EB909E391A3148B7764E6B234AA94E48D30A16DBB2,
+ False,
+ ),
+ (
+ generator_192,
+ 0x9D6DDBCD439BAA0C6B80A654091680E462A7D1D3F1FFEB43,
+ 0x6AD8EFC4D133CCF167C44EB4691C80ABFFB9F82B932B8CAA,
+ False,
+ ),
+ (
+ generator_192,
+ 0x146479D944E6BDA87E5B35818AA666A4C998A71F4E95EDBC,
+ 0xA86D6FE62BC8FBD88139693F842635F687F132255858E7F6,
+ False,
+ ),
+ (
+ generator_192,
+ 0xE594D4A598046F3598243F50FD2C7BD7D380EDB055802253,
+ 0x509014C0C4D6B536E3CA750EC09066AF39B4C8616A53A923,
+ False,
+ ),
+]
+
+
[email protected]("generator,x,y,expected", P192_POINTS)
+def test_point_validity(generator, x, y, expected):
+ """
+ `generator` defines the curve; is `(x, y)` a point on
+ this curve? `expected` is True if the right answer is Yes.
+ """
+ assert point_is_valid(generator, x, y) == expected
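+
+# A single concrete call, as a sketch (the generator is, by definition,
+# a valid point on its own curve):
+#
+# assert point_is_valid(generator_192, generator_192.x(), generator_192.y())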
+
+
+# Trying signature-verification tests from ECDSAVS.pdf B.2.4:
+CURVE_192_KATS = [
+ (
+ generator_192,
+ int(
+ "0x84ce72aa8699df436059f052ac51b6398d2511e49631bcb7e71f89c499b9ee"
+ "425dfbc13a5f6d408471b054f2655617cbbaf7937b7c80cd8865cf02c8487d30"
+ "d2b0fbd8b2c4e102e16d828374bbc47b93852f212d5043c3ea720f086178ff79"
+ "8cc4f63f787b9c2e419efa033e7644ea7936f54462dc21a6c4580725f7f0e7d1"
+ "58",
+ 16,
+ ),
+ 0xD9DBFB332AA8E5FF091E8CE535857C37C73F6250FFB2E7AC,
+ 0x282102E364FEDED3AD15DDF968F88D8321AA268DD483EBC4,
+ 0x64DCA58A20787C488D11D6DD96313F1B766F2D8EFE122916,
+ 0x1ECBA28141E84AB4ECAD92F56720E2CC83EB3D22DEC72479,
+ True,
+ ),
+ (
+ generator_192,
+ int(
+ "0x94bb5bacd5f8ea765810024db87f4224ad71362a3c28284b2b9f39fab86db1"
+ "2e8beb94aae899768229be8fdb6c4f12f28912bb604703a79ccff769c1607f5a"
+ "91450f30ba0460d359d9126cbd6296be6d9c4bb96c0ee74cbb44197c207f6db3"
+ "26ab6f5a659113a9034e54be7b041ced9dcf6458d7fb9cbfb2744d999f7dfd63"
+ "f4",
+ 16,
+ ),
+ 0x3E53EF8D3112AF3285C0E74842090712CD324832D4277AE7,
+ 0xCC75F8952D30AEC2CBB719FC6AA9934590B5D0FF5A83ADB7,
+ 0x8285261607283BA18F335026130BAB31840DCFD9C3E555AF,
+ 0x356D89E1B04541AFC9704A45E9C535CE4A50929E33D7E06C,
+ True,
+ ),
+ (
+ generator_192,
+ int(
+ "0xf6227a8eeb34afed1621dcc89a91d72ea212cb2f476839d9b4243c66877911"
+ "b37b4ad6f4448792a7bbba76c63bdd63414b6facab7dc71c3396a73bd7ee14cd"
+ "d41a659c61c99b779cecf07bc51ab391aa3252386242b9853ea7da67fd768d30"
+ "3f1b9b513d401565b6f1eb722dfdb96b519fe4f9bd5de67ae131e64b40e78c42"
+ "dd",
+ 16,
+ ),
+ 0x16335DBE95F8E8254A4E04575D736BEFB258B8657F773CB7,
+ 0x421B13379C59BC9DCE38A1099CA79BBD06D647C7F6242336,
+ 0x4141BD5D64EA36C5B0BD21EF28C02DA216ED9D04522B1E91,
+ 0x159A6AA852BCC579E821B7BB0994C0861FB08280C38DAA09,
+ False,
+ ),
+ (
+ generator_192,
+ int(
+ "0x16b5f93afd0d02246f662761ed8e0dd9504681ed02a253006eb36736b56309"
+ "7ba39f81c8e1bce7a16c1339e345efabbc6baa3efb0612948ae51103382a8ee8"
+ "bc448e3ef71e9f6f7a9676694831d7f5dd0db5446f179bcb737d4a526367a447"
+ "bfe2c857521c7f40b6d7d7e01a180d92431fb0bbd29c04a0c420a57b3ed26ccd"
+ "8a",
+ 16,
+ ),
+ 0xFD14CDF1607F5EFB7B1793037B15BDF4BAA6F7C16341AB0B,
+ 0x83FA0795CC6C4795B9016DAC928FD6BAC32F3229A96312C4,
+ 0x8DFDB832951E0167C5D762A473C0416C5C15BC1195667DC1,
+ 0x1720288A2DC13FA1EC78F763F8FE2FF7354A7E6FDDE44520,
+ False,
+ ),
+ (
+ generator_192,
+ int(
+ "0x08a2024b61b79d260e3bb43ef15659aec89e5b560199bc82cf7c65c77d3919"
+ "2e03b9a895d766655105edd9188242b91fbde4167f7862d4ddd61e5d4ab55196"
+ "683d4f13ceb90d87aea6e07eb50a874e33086c4a7cb0273a8e1c4408f4b846bc"
+ "eae1ebaac1b2b2ea851a9b09de322efe34cebe601653efd6ddc876ce8c2f2072"
+ "fb",
+ 16,
+ ),
+ 0x674F941DC1A1F8B763C9334D726172D527B90CA324DB8828,
+ 0x65ADFA32E8B236CB33A3E84CF59BFB9417AE7E8EDE57A7FF,
+ 0x9508B9FDD7DAF0D8126F9E2BC5A35E4C6D800B5B804D7796,
+ 0x36F2BF6B21B987C77B53BB801B3435A577E3D493744BFAB0,
+ False,
+ ),
+ (
+ generator_192,
+ int(
+ "0x1843aba74b0789d4ac6b0b8923848023a644a7b70afa23b1191829bbe4397c"
+ "e15b629bf21a8838298653ed0c19222b95fa4f7390d1b4c844d96e645537e0aa"
+ "e98afb5c0ac3bd0e4c37f8daaff25556c64e98c319c52687c904c4de7240a1cc"
+ "55cd9756b7edaef184e6e23b385726e9ffcba8001b8f574987c1a3fedaaa83ca"
+ "6d",
+ 16,
+ ),
+ 0x10ECCA1AAD7220B56A62008B35170BFD5E35885C4014A19F,
+ 0x04EB61984C6C12ADE3BC47F3C629ECE7AA0A033B9948D686,
+ 0x82BFA4E82C0DFE9274169B86694E76CE993FD83B5C60F325,
+ 0xA97685676C59A65DBDE002FE9D613431FB183E8006D05633,
+ False,
+ ),
+ (
+ generator_192,
+ int(
+ "0x5a478f4084ddd1a7fea038aa9732a822106385797d02311aeef4d0264f824f"
+ "698df7a48cfb6b578cf3da416bc0799425bb491be5b5ecc37995b85b03420a98"
+ "f2c4dc5c31a69a379e9e322fbe706bbcaf0f77175e05cbb4fa162e0da82010a2"
+ "78461e3e974d137bc746d1880d6eb02aa95216014b37480d84b87f717bb13f76"
+ "e1",
+ 16,
+ ),
+ 0x6636653CB5B894CA65C448277B29DA3AD101C4C2300F7C04,
+ 0xFDF1CBB3FC3FD6A4F890B59E554544175FA77DBDBEB656C1,
+ 0xEAC2DDECDDFB79931A9C3D49C08DE0645C783A24CB365E1C,
+ 0x3549FEE3CFA7E5F93BC47D92D8BA100E881A2A93C22F8D50,
+ False,
+ ),
+ (
+ generator_192,
+ int(
+ "0xc598774259a058fa65212ac57eaa4f52240e629ef4c310722088292d1d4af6"
+ "c39b49ce06ba77e4247b20637174d0bd67c9723feb57b5ead232b47ea452d5d7"
+ "a089f17c00b8b6767e434a5e16c231ba0efa718a340bf41d67ea2d295812ff1b"
+ "9277daacb8bc27b50ea5e6443bcf95ef4e9f5468fe78485236313d53d1c68f6b"
+ "a2",
+ 16,
+ ),
+ 0xA82BD718D01D354001148CD5F69B9EBF38FF6F21898F8AAA,
+ 0xE67CEEDE07FC2EBFAFD62462A51E4B6C6B3D5B537B7CAF3E,
+ 0x4D292486C620C3DE20856E57D3BB72FCDE4A73AD26376955,
+ 0xA85289591A6081D5728825520E62FF1C64F94235C04C7F95,
+ False,
+ ),
+ (
+ generator_192,
+ int(
+ "0xca98ed9db081a07b7557f24ced6c7b9891269a95d2026747add9e9eb80638a"
+ "961cf9c71a1b9f2c29744180bd4c3d3db60f2243c5c0b7cc8a8d40a3f9a7fc91"
+ "0250f2187136ee6413ffc67f1a25e1c4c204fa9635312252ac0e0481d89b6d53"
+ "808f0c496ba87631803f6c572c1f61fa049737fdacce4adff757afed4f05beb6"
+ "58",
+ 16,
+ ),
+ 0x7D3B016B57758B160C4FCA73D48DF07AE3B6B30225126C2F,
+ 0x4AF3790D9775742BDE46F8DA876711BE1B65244B2B39E7EC,
+ 0x95F778F5F656511A5AB49A5D69DDD0929563C29CBC3A9E62,
+ 0x75C87FC358C251B4C83D2DD979FAAD496B539F9F2EE7A289,
+ False,
+ ),
+ (
+ generator_192,
+ int(
+ "0x31dd9a54c8338bea06b87eca813d555ad1850fac9742ef0bbe40dad400e102"
+ "88acc9c11ea7dac79eb16378ebea9490e09536099f1b993e2653cd50240014c9"
+ "0a9c987f64545abc6a536b9bd2435eb5e911fdfde2f13be96ea36ad38df4ae9e"
+ "a387b29cced599af777338af2794820c9cce43b51d2112380a35802ab7e396c9"
+ "7a",
+ 16,
+ ),
+ 0x9362F28C4EF96453D8A2F849F21E881CD7566887DA8BEB4A,
+ 0xE64D26D8D74C48A024AE85D982EE74CD16046F4EE5333905,
+ 0xF3923476A296C88287E8DE914B0B324AD5A963319A4FE73B,
+ 0xF0BAEED7624ED00D15244D8BA2AEDE085517DBDEC8AC65F5,
+ True,
+ ),
+ (
+ generator_192,
+ int(
+ "0xb2b94e4432267c92f9fdb9dc6040c95ffa477652761290d3c7de312283f645"
+ "0d89cc4aabe748554dfb6056b2d8e99c7aeaad9cdddebdee9dbc099839562d90"
+ "64e68e7bb5f3a6bba0749ca9a538181fc785553a4000785d73cc207922f63e8c"
+ "e1112768cb1de7b673aed83a1e4a74592f1268d8e2a4e9e63d414b5d442bd045"
+ "6d",
+ 16,
+ ),
+ 0xCC6FC032A846AAAC25533EB033522824F94E670FA997ECEF,
+ 0xE25463EF77A029ECCDA8B294FD63DD694E38D223D30862F1,
+ 0x066B1D07F3A40E679B620EDA7F550842A35C18B80C5EBE06,
+ 0xA0B0FB201E8F2DF65E2C4508EF303BDC90D934016F16B2DC,
+ False,
+ ),
+ (
+ generator_192,
+ int(
+ "0x4366fcadf10d30d086911de30143da6f579527036937007b337f7282460eae"
+ "5678b15cccda853193ea5fc4bc0a6b9d7a31128f27e1214988592827520b214e"
+ "ed5052f7775b750b0c6b15f145453ba3fee24a085d65287e10509eb5d5f602c4"
+ "40341376b95c24e5c4727d4b859bfe1483d20538acdd92c7997fa9c614f0f839"
+ "d7",
+ 16,
+ ),
+ 0x955C908FE900A996F7E2089BEE2F6376830F76A19135E753,
+ 0xBA0C42A91D3847DE4A592A46DC3FDAF45A7CC709B90DE520,
+ 0x1F58AD77FC04C782815A1405B0925E72095D906CBF52A668,
+ 0xF2E93758B3AF75EDF784F05A6761C9B9A6043C66B845B599,
+ False,
+ ),
+ (
+ generator_192,
+ int(
+ "0x543f8af57d750e33aa8565e0cae92bfa7a1ff78833093421c2942cadf99866"
+ "70a5ff3244c02a8225e790fbf30ea84c74720abf99cfd10d02d34377c3d3b412"
+ "69bea763384f372bb786b5846f58932defa68023136cd571863b304886e95e52"
+ "e7877f445b9364b3f06f3c28da12707673fecb4b8071de06b6e0a3c87da160ce"
+ "f3",
+ 16,
+ ),
+ 0x31F7FA05576D78A949B24812D4383107A9A45BB5FCCDD835,
+ 0x8DC0EB65994A90F02B5E19BD18B32D61150746C09107E76B,
+ 0xBE26D59E4E883DDE7C286614A767B31E49AD88789D3A78FF,
+ 0x8762CA831C1CE42DF77893C9B03119428E7A9B819B619068,
+ False,
+ ),
+ (
+ generator_192,
+ int(
+ "0xd2e8454143ce281e609a9d748014dcebb9d0bc53adb02443a6aac2ffe6cb009f"
+ "387c346ecb051791404f79e902ee333ad65e5c8cb38dc0d1d39a8dc90add502357"
+ "2720e5b94b190d43dd0d7873397504c0c7aef2727e628eb6a74411f2e400c65670"
+ "716cb4a815dc91cbbfeb7cfe8c929e93184c938af2c078584da045e8f8d1",
+ 16,
+ ),
+ 0x66AA8EDBBDB5CF8E28CEB51B5BDA891CAE2DF84819FE25C0,
+ 0x0C6BC2F69030A7CE58D4A00E3B3349844784A13B8936F8DA,
+ 0xA4661E69B1734F4A71B788410A464B71E7FFE42334484F23,
+ 0x738421CF5E049159D69C57A915143E226CAC8355E149AFE9,
+ False,
+ ),
+ (
+ generator_192,
+ int(
+ "0x6660717144040f3e2f95a4e25b08a7079c702a8b29babad5a19a87654bc5c5af"
+ "a261512a11b998a4fb36b5d8fe8bd942792ff0324b108120de86d63f65855e5461"
+ "184fc96a0a8ffd2ce6d5dfb0230cbbdd98f8543e361b3205f5da3d500fdc8bac6d"
+ "b377d75ebef3cb8f4d1ff738071ad0938917889250b41dd1d98896ca06fb",
+ 16,
+ ),
+ 0xBCFACF45139B6F5F690A4C35A5FFFA498794136A2353FC77,
+ 0x6F4A6C906316A6AFC6D98FE1F0399D056F128FE0270B0F22,
+ 0x9DB679A3DAFE48F7CCAD122933ACFE9DA0970B71C94C21C1,
+ 0x984C2DB99827576C0A41A5DA41E07D8CC768BC82F18C9DA9,
+ False,
+ ),
+]
+
+
[email protected]("gen,msg,qx,qy,r,s,expected", CURVE_192_KATS)
+def test_signature_validity(gen, msg, qx, qy, r, s, expected):
+ """
+ `msg` = message, `qx` and `qy` represent the base point on
+ elliptic curve of `gen`, `r` and `s` are the signature, and
+ `expected` is True iff the signature is expected to be valid."""
+ pubk = Public_key(gen, ellipticcurve.Point(gen.curve(), qx, qy))
+ assert expected == pubk.verifies(digest_integer(msg), Signature(r, s))
+
+
+ "gen,msg,qx,qy,r,s,expected", [x for x in CURVE_192_KATS if x[6]]
+)
+def test_pk_recovery(gen, msg, r, s, qx, qy, expected):
+ del expected
+ sign = Signature(r, s)
+ pks = sign.recover_public_keys(digest_integer(msg), gen)
+
+ assert pks
+
+ # Test if the signature is valid for all found public keys
+ for pk in pks:
+ q = pk.point
+ test_signature_validity(gen, msg, q.x(), q.y(), r, s, True)
+
+ # Test if the original public key is in the set of found keys
+ original_q = ellipticcurve.Point(gen.curve(), qx, qy)
+ points = [pk.point for pk in pks]
+ assert original_q in points
+
+
+@st.composite
+def st_random_gen_key_msg_nonce(draw):
+ """Hypothesis strategy for test_sig_verify()."""
+ name_gen = {
+ "generator_192": generator_192,
+ "generator_224": generator_224,
+ "generator_256": generator_256,
+ "generator_secp256k1": generator_secp256k1,
+ "generator_384": generator_384,
+ "generator_521": generator_521,
+ }
+ name = draw(st.sampled_from(sorted(name_gen.keys())))
+ note("Generator used: {0}".format(name))
+ generator = name_gen[name]
+ order = int(generator.order())
+
+ key = draw(st.integers(min_value=1, max_value=order))
+ msg = draw(st.integers(min_value=1, max_value=order))
+ nonce = draw(
+ st.integers(min_value=1, max_value=order + 1)
+ | st.integers(min_value=order >> 1, max_value=order)
+ )
+ return generator, key, msg, nonce
+
+
+SIG_VER_SETTINGS = dict(HYP_SETTINGS)
+SIG_VER_SETTINGS["max_examples"] = 10
+
+
+@settings(**SIG_VER_SETTINGS)
+@example((generator_224, 4, 1, 1))
+@given(st_random_gen_key_msg_nonce())
+def test_sig_verify(args):
+ """
+ Check if signing and verification works for arbitrary messages and
+ that signatures for other messages are rejected.
+ """
+ generator, sec_mult, msg, nonce = args
+
+ pubkey = Public_key(generator, generator * sec_mult)
+ privkey = Private_key(pubkey, sec_mult)
+
+ signature = privkey.sign(msg, nonce)
+
+ assert pubkey.verifies(msg, signature)
+
+ assert not pubkey.verifies(msg - 1, signature)
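+
+# The same round-trip outside of hypothesis, as a minimal sketch (the secret
+# multiplier, message and nonce below are arbitrary illustrative values):
+#
+# gen = generator_192
+# pubkey = Public_key(gen, gen * 999)
+# privkey = Private_key(pubkey, 999)
+# sig = privkey.sign(1234, 5678)
+# assert pubkey.verifies(1234, sig)
+# assert not pubkey.verifies(1233, sig)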
diff --git a/frozen_deps/ecdsa/test_ellipticcurve.py b/frozen_deps/ecdsa/test_ellipticcurve.py
new file mode 100644
index 0000000..def53b2
--- /dev/null
+++ b/frozen_deps/ecdsa/test_ellipticcurve.py
@@ -0,0 +1,200 @@
+import pytest
+from six import print_
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+from hypothesis import given, settings
+import hypothesis.strategies as st
+
+try:
+ from hypothesis import HealthCheck
+
+ HC_PRESENT = True
+except ImportError: # pragma: no cover
+ HC_PRESENT = False
+from .numbertheory import inverse_mod
+from .ellipticcurve import CurveFp, INFINITY, Point
+
+
+HYP_SETTINGS = {}
+if HC_PRESENT: # pragma: no branch
+ HYP_SETTINGS["suppress_health_check"] = [HealthCheck.too_slow]
+ HYP_SETTINGS["deadline"] = 5000
+
+
+# NIST Curve P-192:
+p = 6277101735386680763835789423207666416083908700390324961279
+r = 6277101735386680763835789423176059013767194773182842284081
+# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5
+# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65
+b = 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1
+Gx = 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012
+Gy = 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811
+
+c192 = CurveFp(p, -3, b)
+p192 = Point(c192, Gx, Gy, r)
+
+c_23 = CurveFp(23, 1, 1)
+g_23 = Point(c_23, 13, 7, 7)
+
+
+HYP_SLOW_SETTINGS = dict(HYP_SETTINGS)
+HYP_SLOW_SETTINGS["max_examples"] = 10
+
+
+@settings(**HYP_SLOW_SETTINGS)
+@given(st.integers(min_value=1, max_value=r + 1))
+def test_p192_mult_tests(multiple):
+ inv_m = inverse_mod(multiple, r)
+
+ p1 = p192 * multiple
+ assert p1 * inv_m == p192
+
+
+def add_n_times(point, n):
+ ret = INFINITY
+ i = 0
+ while i <= n:
+ yield ret
+ ret = ret + point
+ i += 1
+
+
+# From X9.62 I.1 (p. 96):
+ "p, m, check",
+ [(g_23, n, exp) for n, exp in enumerate(add_n_times(g_23, 8))],
+ ids=["g_23 test with mult {0}".format(i) for i in range(9)],
+)
+def test_add_and_mult_equivalence(p, m, check):
+ assert p * m == check
+
+
+class TestCurve(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.c_23 = CurveFp(23, 1, 1)
+
+ def test_equality_curves(self):
+ self.assertEqual(self.c_23, CurveFp(23, 1, 1))
+
+ def test_inequality_curves(self):
+ c192 = CurveFp(p, -3, b)
+ self.assertNotEqual(self.c_23, c192)
+
+ def test_usability_in_a_hashed_collection_curves(self):
+ {self.c_23: None}
+
+ def test_hashability_curves(self):
+ hash(self.c_23)
+
+ def test_conflation_curves(self):
+ ne1, ne2, ne3 = CurveFp(24, 1, 1), CurveFp(23, 2, 1), CurveFp(23, 1, 2)
+ eq1, eq2, eq3 = CurveFp(23, 1, 1), CurveFp(23, 1, 1), self.c_23
+ self.assertEqual(len(set((c_23, eq1, eq2, eq3))), 1)
+ self.assertEqual(len(set((c_23, ne1, ne2, ne3))), 4)
+ self.assertDictEqual({c_23: None}, {eq1: None})
+ self.assertTrue(eq2 in {eq3: None})
+
+
+class TestPoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.c_23 = CurveFp(23, 1, 1)
+ cls.g_23 = Point(cls.c_23, 13, 7, 7)
+
+ p = 6277101735386680763835789423207666416083908700390324961279
+ r = 6277101735386680763835789423176059013767194773182842284081
+ # s = 0x3045ae6fc8422f64ed579528d38120eae12196d5
+ # c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65
+ b = 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1
+ Gx = 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012
+ Gy = 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811
+
+ cls.c192 = CurveFp(p, -3, b)
+ cls.p192 = Point(cls.c192, Gx, Gy, r)
+
+ def test_p192(self):
+ # Checking against some sample computations presented
+ # in X9.62:
+ d = 651056770906015076056810763456358567190100156695615665659
+ Q = d * self.p192
+ self.assertEqual(
+ Q.x(), 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5
+ )
+
+ k = 6140507067065001063065065565667405560006161556565665656654
+ R = k * self.p192
+ self.assertEqual(
+ R.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD
+ )
+ self.assertEqual(
+ R.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835
+ )
+
+ u1 = 2563697409189434185194736134579731015366492496392189760599
+ u2 = 6266643813348617967186477710235785849136406323338782220568
+ temp = u1 * self.p192 + u2 * Q
+ self.assertEqual(
+ temp.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD
+ )
+ self.assertEqual(
+ temp.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835
+ )
+
+ def test_double_infinity(self):
+ p1 = INFINITY
+ p3 = p1.double()
+ self.assertEqual(p1, p3)
+ self.assertEqual(p3.x(), p1.x())
+ self.assertEqual(p3.y(), p1.y())
+
+ def test_double(self):
+ x1, y1, x3, y3 = (3, 10, 7, 12)
+
+ p1 = Point(self.c_23, x1, y1)
+ p3 = p1.double()
+ self.assertEqual(p3.x(), x3)
+ self.assertEqual(p3.y(), y3)
+
+ def test_multiply(self):
+ x1, y1, m, x3, y3 = (3, 10, 2, 7, 12)
+ p1 = Point(self.c_23, x1, y1)
+ p3 = p1 * m
+ self.assertEqual(p3.x(), x3)
+ self.assertEqual(p3.y(), y3)
+
+ # Trivial tests from X9.62 B.3:
+ def test_add(self):
+ """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3)."""
+
+ x1, y1, x2, y2, x3, y3 = (3, 10, 9, 7, 17, 20)
+ p1 = Point(self.c_23, x1, y1)
+ p2 = Point(self.c_23, x2, y2)
+ p3 = p1 + p2
+ self.assertEqual(p3.x(), x3)
+ self.assertEqual(p3.y(), y3)
+
+ def test_add_as_double(self):
+ """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3)."""
+
+ x1, y1, x2, y2, x3, y3 = (3, 10, 3, 10, 7, 12)
+ p1 = Point(self.c_23, x1, y1)
+ p2 = Point(self.c_23, x2, y2)
+ p3 = p1 + p2
+ self.assertEqual(p3.x(), x3)
+ self.assertEqual(p3.y(), y3)
+
+ def test_equality_points(self):
+ self.assertEqual(self.g_23, Point(self.c_23, 13, 7, 7))
+
+ def test_inequality_points(self):
+ c = CurveFp(100, -3, 100)
+ p = Point(c, 100, 100, 100)
+ self.assertNotEqual(self.g_23, p)
+
+ def test_inequality_points_diff_types(self):
+ c = CurveFp(100, -3, 100)
+ self.assertNotEqual(self.g_23, c)
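+
+# The toy-curve arithmetic exercised above, written out directly as a sketch
+# (the values are the X9.62 B.3 ones already used in test_add and
+# test_double):
+#
+# c = CurveFp(23, 1, 1)
+# p1 = Point(c, 3, 10)
+# p2 = Point(c, 9, 7)
+# assert p1 + p2 == Point(c, 17, 20)
+# assert p1.double() == Point(c, 7, 12)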
diff --git a/frozen_deps/ecdsa/test_jacobi.py b/frozen_deps/ecdsa/test_jacobi.py
new file mode 100644
index 0000000..43ed6c1
--- /dev/null
+++ b/frozen_deps/ecdsa/test_jacobi.py
@@ -0,0 +1,438 @@
+import pickle
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+import hypothesis.strategies as st
+from hypothesis import given, assume, settings, example
+
+from .ellipticcurve import CurveFp, Point, PointJacobi, INFINITY
+from .ecdsa import (
+ generator_256,
+ curve_256,
+ generator_224,
+ generator_brainpoolp160r1,
+ curve_brainpoolp160r1,
+)
+from .numbertheory import inverse_mod
+
+
+class TestJacobi(unittest.TestCase):
+ def test___init__(self):
+ curve = object()
+ x = 2
+ y = 3
+ z = 1
+ order = 4
+ pj = PointJacobi(curve, x, y, z, order)
+
+ self.assertEqual(pj.order(), order)
+ self.assertIs(pj.curve(), curve)
+ self.assertEqual(pj.x(), x)
+ self.assertEqual(pj.y(), y)
+
+ def test_add_with_different_curves(self):
+ p_a = PointJacobi.from_affine(generator_256)
+ p_b = PointJacobi.from_affine(generator_224)
+
+ with self.assertRaises(ValueError):
+ p_a + p_b
+
+ def test_compare_different_curves(self):
+ self.assertNotEqual(generator_256, generator_224)
+
+ def test_equality_with_non_point(self):
+ pj = PointJacobi.from_affine(generator_256)
+
+ self.assertNotEqual(pj, "value")
+
+ def test_conversion(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pw = pj.to_affine()
+
+ self.assertEqual(generator_256, pw)
+
+ def test_single_double(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pw = generator_256.double()
+
+ pj = pj.double()
+
+ self.assertEqual(pj.x(), pw.x())
+ self.assertEqual(pj.y(), pw.y())
+
+ def test_double_with_zero_point(self):
+ pj = PointJacobi(curve_256, 0, 0, 1)
+
+ pj = pj.double()
+
+ self.assertIs(pj, INFINITY)
+
+ def test_double_with_zero_equivalent_point(self):
+ pj = PointJacobi(curve_256, 0, curve_256.p(), 1)
+
+ pj = pj.double()
+
+ self.assertIs(pj, INFINITY)
+
+ def test_double_with_zero_equivalent_point_non_1_z(self):
+ pj = PointJacobi(curve_256, 0, curve_256.p(), 2)
+
+ pj = pj.double()
+
+ self.assertIs(pj, INFINITY)
+
+ def test_compare_with_affine_point(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pa = pj.to_affine()
+
+ self.assertEqual(pj, pa)
+ self.assertEqual(pa, pj)
+
+ def test_to_affine_with_zero_point(self):
+ pj = PointJacobi(curve_256, 0, 0, 1)
+
+ pa = pj.to_affine()
+
+ self.assertIs(pa, INFINITY)
+
+ def test_add_with_affine_point(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pa = pj.to_affine()
+
+ s = pj + pa
+
+ self.assertEqual(s, pj.double())
+
+ def test_radd_with_affine_point(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pa = pj.to_affine()
+
+ s = pa + pj
+
+ self.assertEqual(s, pj.double())
+
+ def test_add_with_infinity(self):
+ pj = PointJacobi.from_affine(generator_256)
+
+ s = pj + INFINITY
+
+ self.assertEqual(s, pj)
+
+ def test_add_zero_point_to_affine(self):
+ pa = PointJacobi.from_affine(generator_256).to_affine()
+ pj = PointJacobi(curve_256, 0, 0, 1)
+
+ s = pj + pa
+
+ self.assertIs(s, pa)
+
+ def test_multiply_by_zero(self):
+ pj = PointJacobi.from_affine(generator_256)
+
+ pj = pj * 0
+
+ self.assertIs(pj, INFINITY)
+
+ def test_zero_point_multiply_by_one(self):
+ pj = PointJacobi(curve_256, 0, 0, 1)
+
+ pj = pj * 1
+
+ self.assertIs(pj, INFINITY)
+
+ def test_multiply_by_one(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pw = generator_256 * 1
+
+ pj = pj * 1
+
+ self.assertEqual(pj.x(), pw.x())
+ self.assertEqual(pj.y(), pw.y())
+
+ def test_multiply_by_two(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pw = generator_256 * 2
+
+ pj = pj * 2
+
+ self.assertEqual(pj.x(), pw.x())
+ self.assertEqual(pj.y(), pw.y())
+
+ def test_rmul_by_two(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pw = generator_256 * 2
+
+ pj = 2 * pj
+
+ self.assertEqual(pj, pw)
+
+ def test_compare_non_zero_with_infinity(self):
+ pj = PointJacobi.from_affine(generator_256)
+
+ self.assertNotEqual(pj, INFINITY)
+
+ def test_compare_zero_point_with_infinity(self):
+ pj = PointJacobi(curve_256, 0, 0, 1)
+
+ self.assertEqual(pj, INFINITY)
+
+ def test_compare_double_with_multiply(self):
+ pj = PointJacobi.from_affine(generator_256)
+ dbl = pj.double()
+ mlpl = pj * 2
+
+ self.assertEqual(dbl, mlpl)
+
+ @settings(max_examples=10)
+ @given(
+ st.integers(
+ min_value=0, max_value=int(generator_brainpoolp160r1.order())
+ )
+ )
+ def test_multiplications(self, mul):
+ pj = PointJacobi.from_affine(generator_brainpoolp160r1)
+ pw = pj.to_affine() * mul
+
+ pj = pj * mul
+
+ self.assertEqual((pj.x(), pj.y()), (pw.x(), pw.y()))
+ self.assertEqual(pj, pw)
+
+ @settings(max_examples=10)
+ @given(
+ st.integers(
+ min_value=0, max_value=int(generator_brainpoolp160r1.order())
+ )
+ )
+ @example(0)
+ @example(int(generator_brainpoolp160r1.order()))
+ def test_precompute(self, mul):
+ precomp = PointJacobi.from_affine(generator_brainpoolp160r1, True)
+ pj = PointJacobi.from_affine(generator_brainpoolp160r1)
+
+ a = precomp * mul
+ b = pj * mul
+
+ self.assertEqual(a, b)
+
+ @settings(max_examples=10)
+ @given(
+ st.integers(
+ min_value=1, max_value=int(generator_brainpoolp160r1.order())
+ ),
+ st.integers(
+ min_value=1, max_value=int(generator_brainpoolp160r1.order())
+ ),
+ )
+ @example(3, 3)
+ def test_add_scaled_points(self, a_mul, b_mul):
+ j_g = PointJacobi.from_affine(generator_brainpoolp160r1)
+ a = PointJacobi.from_affine(j_g * a_mul)
+ b = PointJacobi.from_affine(j_g * b_mul)
+
+ c = a + b
+
+ self.assertEqual(c, j_g * (a_mul + b_mul))
+
+ @settings(max_examples=10)
+ @given(
+ st.integers(
+ min_value=1, max_value=int(generator_brainpoolp160r1.order())
+ ),
+ st.integers(
+ min_value=1, max_value=int(generator_brainpoolp160r1.order())
+ ),
+ st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)),
+ )
+ def test_add_one_scaled_point(self, a_mul, b_mul, new_z):
+ j_g = PointJacobi.from_affine(generator_brainpoolp160r1)
+ a = PointJacobi.from_affine(j_g * a_mul)
+ b = PointJacobi.from_affine(j_g * b_mul)
+
+ p = curve_brainpoolp160r1.p()
+
+ assume(inverse_mod(new_z, p))
+
+ new_zz = new_z * new_z % p
+
+ b = PointJacobi(
+ curve_brainpoolp160r1,
+ b.x() * new_zz % p,
+ b.y() * new_zz * new_z % p,
+ new_z,
+ )
+
+ c = a + b
+
+ self.assertEqual(c, j_g * (a_mul + b_mul))
+
+ @settings(max_examples=10)
+ @given(
+ st.integers(
+ min_value=1, max_value=int(generator_brainpoolp160r1.order())
+ ),
+ st.integers(
+ min_value=1, max_value=int(generator_brainpoolp160r1.order())
+ ),
+ st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)),
+ )
+ @example(1, 1, 1)
+ @example(3, 3, 3)
+ @example(2, int(generator_brainpoolp160r1.order() - 2), 1)
+ @example(2, int(generator_brainpoolp160r1.order() - 2), 3)
+ def test_add_same_scale_points(self, a_mul, b_mul, new_z):
+ j_g = PointJacobi.from_affine(generator_brainpoolp160r1)
+ a = PointJacobi.from_affine(j_g * a_mul)
+ b = PointJacobi.from_affine(j_g * b_mul)
+
+ p = curve_brainpoolp160r1.p()
+
+ assume(inverse_mod(new_z, p))
+
+ new_zz = new_z * new_z % p
+
+ a = PointJacobi(
+ curve_brainpoolp160r1,
+ a.x() * new_zz % p,
+ a.y() * new_zz * new_z % p,
+ new_z,
+ )
+ b = PointJacobi(
+ curve_brainpoolp160r1,
+ b.x() * new_zz % p,
+ b.y() * new_zz * new_z % p,
+ new_z,
+ )
+
+ c = a + b
+
+ self.assertEqual(c, j_g * (a_mul + b_mul))
+
+ @settings(max_examples=14)
+ @given(
+ st.integers(
+ min_value=1, max_value=int(generator_brainpoolp160r1.order())
+ ),
+ st.integers(
+ min_value=1, max_value=int(generator_brainpoolp160r1.order())
+ ),
+ st.lists(
+ st.integers(
+ min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)
+ ),
+ min_size=2,
+ max_size=2,
+ unique=True,
+ ),
+ )
+ @example(2, 2, [2, 1])
+ @example(2, 2, [2, 3])
+ @example(2, int(generator_brainpoolp160r1.order() - 2), [2, 3])
+ @example(2, int(generator_brainpoolp160r1.order() - 2), [2, 1])
+ def test_add_different_scale_points(self, a_mul, b_mul, new_z):
+ j_g = PointJacobi.from_affine(generator_brainpoolp160r1)
+ a = PointJacobi.from_affine(j_g * a_mul)
+ b = PointJacobi.from_affine(j_g * b_mul)
+
+ p = curve_brainpoolp160r1.p()
+
+ assume(inverse_mod(new_z[0], p))
+ assume(inverse_mod(new_z[1], p))
+
+ new_zz0 = new_z[0] * new_z[0] % p
+ new_zz1 = new_z[1] * new_z[1] % p
+
+ a = PointJacobi(
+ curve_brainpoolp160r1,
+ a.x() * new_zz0 % p,
+ a.y() * new_zz0 * new_z[0] % p,
+ new_z[0],
+ )
+ b = PointJacobi(
+ curve_brainpoolp160r1,
+ b.x() * new_zz1 % p,
+ b.y() * new_zz1 * new_z[1] % p,
+ new_z[1],
+ )
+
+ c = a + b
+
+ self.assertEqual(c, j_g * (a_mul + b_mul))
+
+ def test_add_point_3_times(self):
+ j_g = PointJacobi.from_affine(generator_256)
+
+ self.assertEqual(j_g * 3, j_g + j_g + j_g)
+
+ def test_mul_add_inf(self):
+ j_g = PointJacobi.from_affine(generator_256)
+
+ self.assertEqual(j_g, j_g.mul_add(1, INFINITY, 1))
+
+ def test_mul_add_same(self):
+ j_g = PointJacobi.from_affine(generator_256)
+
+ self.assertEqual(j_g * 2, j_g.mul_add(1, j_g, 1))
+
+ def test_mul_add_precompute(self):
+ j_g = PointJacobi.from_affine(generator_256, True)
+ b = PointJacobi.from_affine(j_g * 255, True)
+
+ self.assertEqual(j_g * 256, j_g + b)
+ self.assertEqual(j_g * (5 + 255 * 7), j_g * 5 + b * 7)
+ self.assertEqual(j_g * (5 + 255 * 7), j_g.mul_add(5, b, 7))
+
+ def test_mul_add_precompute_large(self):
+ j_g = PointJacobi.from_affine(generator_256, True)
+ b = PointJacobi.from_affine(j_g * 255, True)
+
+ self.assertEqual(j_g * 256, j_g + b)
+ self.assertEqual(
+ j_g * (0xFF00 + 255 * 0xF0F0), j_g * 0xFF00 + b * 0xF0F0
+ )
+ self.assertEqual(
+ j_g * (0xFF00 + 255 * 0xF0F0), j_g.mul_add(0xFF00, b, 0xF0F0)
+ )
+
+ def test_mul_add_to_mul(self):
+ j_g = PointJacobi.from_affine(generator_256)
+
+ a = j_g * 3
+ b = j_g.mul_add(2, j_g, 1)
+
+ self.assertEqual(a, b)
+
+ def test_mul_add(self):
+ j_g = PointJacobi.from_affine(generator_256)
+
+ w_a = generator_256 * 255
+ w_b = generator_256 * (0xA8 * 0xF0)
+ j_b = j_g * 0xA8
+
+ ret = j_g.mul_add(255, j_b, 0xF0)
+
+ self.assertEqual(ret.to_affine(), w_a + w_b)
+
+ def test_mul_add_large(self):
+ j_g = PointJacobi.from_affine(generator_256)
+ b = PointJacobi.from_affine(j_g * 255)
+
+ self.assertEqual(j_g * 256, j_g + b)
+ self.assertEqual(
+ j_g * (0xFF00 + 255 * 0xF0F0), j_g * 0xFF00 + b * 0xF0F0
+ )
+ self.assertEqual(
+ j_g * (0xFF00 + 255 * 0xF0F0), j_g.mul_add(0xFF00, b, 0xF0F0)
+ )
+
+ def test_equality(self):
+ pj1 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1)
+ pj2 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1)
+ self.assertEqual(pj1, pj2)
+
+ def test_pickle(self):
+ pj = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1)
+ self.assertEqual(pickle.loads(pickle.dumps(pj)), pj)
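+
+# The rescaling used in the test_add_*_scale_points cases above relies on the
+# fact that the Jacobian triplets (X, Y, Z) and (X*z**2, Y*z**3, Z*z) describe
+# the same affine point (X/Z**2, Y/Z**3).  A sketch with an arbitrary
+# illustrative scale z = 5 and the generator used in those tests:
+#
+# p = curve_brainpoolp160r1.p()
+# a = PointJacobi.from_affine(generator_brainpoolp160r1)
+# z = 5
+# b = PointJacobi(
+# curve_brainpoolp160r1, a.x() * z * z % p, a.y() * z**3 % p, z
+# )
+# assert a == b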
diff --git a/frozen_deps/ecdsa/test_keys.py b/frozen_deps/ecdsa/test_keys.py
new file mode 100644
index 0000000..406a5bf
--- /dev/null
+++ b/frozen_deps/ecdsa/test_keys.py
@@ -0,0 +1,454 @@
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+try:
+ buffer
+except NameError:
+ buffer = memoryview
+
+import array
+import six
+import sys
+import pytest
+import hashlib
+
+from .keys import VerifyingKey, SigningKey
+from .der import unpem
+from .util import (
+ sigencode_string,
+ sigencode_der,
+ sigencode_strings,
+ sigdecode_string,
+ sigdecode_der,
+ sigdecode_strings,
+)
+from .curves import NIST256p
+
+
+class TestVerifyingKeyFromString(unittest.TestCase):
+ """
+ Verify that ecdsa.keys.VerifyingKey.from_string() can be used with
+ bytes-like objects
+ """
+
+ @classmethod
+ def setUpClass(cls):
+ cls.key_bytes = (
+ b"\x04L\xa2\x95\xdb\xc7Z\xd7\x1f\x93\nz\xcf\x97\xcf"
+ b"\xd7\xc2\xd9o\xfe8}X!\xae\xd4\xfah\xfa^\rpI\xba\xd1"
+ b"Y\xfb\x92xa\xebo+\x9cG\xfav\xca"
+ )
+ cls.vk = VerifyingKey.from_string(cls.key_bytes)
+
+ def test_bytes(self):
+ self.assertIsNotNone(self.vk)
+ self.assertIsInstance(self.vk, VerifyingKey)
+ self.assertEqual(
+ self.vk.pubkey.point.x(),
+ 105419898848891948935835657980914000059957975659675736097,
+ )
+ self.assertEqual(
+ self.vk.pubkey.point.y(),
+ 4286866841217412202667522375431381222214611213481632495306,
+ )
+
+ def test_bytes_memoryview(self):
+ vk = VerifyingKey.from_string(buffer(self.key_bytes))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytearray(self):
+ vk = VerifyingKey.from_string(bytearray(self.key_bytes))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytesarray_memoryview(self):
+ vk = VerifyingKey.from_string(buffer(bytearray(self.key_bytes)))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_bytes(self):
+ arr = array.array("B", self.key_bytes)
+ vk = VerifyingKey.from_string(arr)
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_bytes_memoryview(self):
+ arr = array.array("B", self.key_bytes)
+ vk = VerifyingKey.from_string(buffer(arr))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_ints(self):
+ arr = array.array("I", self.key_bytes)
+ vk = VerifyingKey.from_string(arr)
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_ints_memoryview(self):
+ arr = array.array("I", self.key_bytes)
+ vk = VerifyingKey.from_string(buffer(arr))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytes_uncompressed(self):
+ vk = VerifyingKey.from_string(b"\x04" + self.key_bytes)
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytearray_uncompressed(self):
+ vk = VerifyingKey.from_string(bytearray(b"\x04" + self.key_bytes))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytes_compressed(self):
+ vk = VerifyingKey.from_string(b"\x02" + self.key_bytes[:24])
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytearray_compressed(self):
+ vk = VerifyingKey.from_string(bytearray(b"\x02" + self.key_bytes[:24]))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+
+class TestVerifyingKeyFromDer(unittest.TestCase):
+ """
+ Verify that ecdsa.keys.VerifyingKey.from_der() can be used with
+ bytes-like objects.
+ """
+
+ @classmethod
+ def setUpClass(cls):
+ prv_key_str = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n"
+ "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n"
+ "bA==\n"
+ "-----END EC PRIVATE KEY-----\n"
+ )
+ key_str = (
+ "-----BEGIN PUBLIC KEY-----\n"
+ "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n"
+ "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n"
+ "-----END PUBLIC KEY-----\n"
+ )
+ cls.key_pem = key_str
+
+ cls.key_bytes = unpem(key_str)
+ assert isinstance(cls.key_bytes, bytes)
+ cls.vk = VerifyingKey.from_pem(key_str)
+ cls.sk = SigningKey.from_pem(prv_key_str)
+
+ key_str = (
+ "-----BEGIN PUBLIC KEY-----\n"
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8\n"
+ "Poqzgjau4kfxBPyZimeRfuY/9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n"
+ "-----END PUBLIC KEY-----\n"
+ )
+ cls.vk2 = VerifyingKey.from_pem(key_str)
+
+ def test_custom_hashfunc(self):
+ vk = VerifyingKey.from_der(self.key_bytes, hashlib.sha256)
+
+ self.assertIs(vk.default_hashfunc, hashlib.sha256)
+
+ def test_from_pem_with_custom_hashfunc(self):
+ vk = VerifyingKey.from_pem(self.key_pem, hashlib.sha256)
+
+ self.assertIs(vk.default_hashfunc, hashlib.sha256)
+
+ def test_bytes(self):
+ vk = VerifyingKey.from_der(self.key_bytes)
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytes_memoryview(self):
+ vk = VerifyingKey.from_der(buffer(self.key_bytes))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytearray(self):
+ vk = VerifyingKey.from_der(bytearray(self.key_bytes))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytesarray_memoryview(self):
+ vk = VerifyingKey.from_der(buffer(bytearray(self.key_bytes)))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_bytes(self):
+ arr = array.array("B", self.key_bytes)
+ vk = VerifyingKey.from_der(arr)
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_bytes_memoryview(self):
+ arr = array.array("B", self.key_bytes)
+ vk = VerifyingKey.from_der(buffer(arr))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_equality_on_verifying_keys(self):
+ self.assertEqual(self.vk, self.sk.get_verifying_key())
+
+ def test_inequality_on_verifying_keys(self):
+ self.assertNotEqual(self.vk, self.vk2)
+
+ def test_inequality_on_verifying_keys_not_implemented(self):
+ self.assertNotEqual(self.vk, None)
+
+
+class TestSigningKey(unittest.TestCase):
+ """
+ Verify that ecdsa.keys.SigningKey.from_der() can be used with
+ bytes-like objects.
+ """
+
+ @classmethod
+ def setUpClass(cls):
+ prv_key_str = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n"
+ "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n"
+ "bA==\n"
+ "-----END EC PRIVATE KEY-----\n"
+ )
+ cls.sk1 = SigningKey.from_pem(prv_key_str)
+
+ prv_key_str = (
+ "-----BEGIN PRIVATE KEY-----\n"
+ "MG8CAQAwEwYHKoZIzj0CAQYIKoZIzj0DAQEEVTBTAgEBBBheyEIL1u+SUqlC6YkE\n"
+ "PKKfVh+lJXcOscWhNAMyAAS4gXfQhO8X9eRWOUCAKDYPn1m0pNcmTmLaBlHc5Ho1\n"
+ "pMW0XPUVk0I6i1V7nCCZ82w=\n"
+ "-----END PRIVATE KEY-----\n"
+ )
+ cls.sk1_pkcs8 = SigningKey.from_pem(prv_key_str)
+
+ prv_key_str = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MHcCAQEEIKlL2EAm5NPPZuXwxRf4nXMk0A80y6UUbiQ17be/qFhRoAoGCCqGSM49\n"
+ "AwEHoUQDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8Poqzgjau4kfxBPyZimeRfuY/\n"
+ "9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n"
+ "-----END EC PRIVATE KEY-----\n"
+ )
+ cls.sk2 = SigningKey.from_pem(prv_key_str)
+
+ def test_equality_on_signing_keys(self):
+ sk = SigningKey.from_secret_exponent(
+ self.sk1.privkey.secret_multiplier, self.sk1.curve
+ )
+ self.assertEqual(self.sk1, sk)
+ self.assertEqual(self.sk1_pkcs8, sk)
+
+ def test_verify_with_precompute(self):
+ sig = self.sk1.sign(b"message")
+
+ vk = self.sk1.verifying_key
+
+ vk.precompute()
+
+ self.assertTrue(vk.verify(sig, b"message"))
+
+ def test_compare_verifying_key_with_precompute(self):
+ vk1 = self.sk1.verifying_key
+ vk1.precompute()
+
+ vk2 = self.sk1_pkcs8.verifying_key
+
+ self.assertEqual(vk1, vk2)
+
+ def test_verify_with_lazy_precompute(self):
+ sig = self.sk2.sign(b"other message")
+
+ vk = self.sk2.verifying_key
+
+ vk.precompute(lazy=True)
+
+ self.assertTrue(vk.verify(sig, b"other message"))
+
+ def test_inequality_on_signing_keys(self):
+ self.assertNotEqual(self.sk1, self.sk2)
+
+ def test_inequality_on_signing_keys_not_implemented(self):
+ self.assertNotEqual(self.sk1, None)
+
+
+# test VerifyingKey.verify()
+prv_key_str = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n"
+ "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n"
+ "bA==\n"
+ "-----END EC PRIVATE KEY-----\n"
+)
+key_bytes = unpem(prv_key_str)
+assert isinstance(key_bytes, bytes)
+sk = SigningKey.from_der(key_bytes)
+vk = sk.verifying_key
+
+data = (
+ b"some string for signing"
+ b"contents don't really matter"
+ b"but do include also some crazy values: "
+ b"\x00\x01\t\r\n\x00\x00\x00\xff\xf0"
+)
+assert len(data) % 4 == 0
+sha1 = hashlib.sha1()
+sha1.update(data)
+data_hash = sha1.digest()
+assert isinstance(data_hash, bytes)
+sig_raw = sk.sign(data, sigencode=sigencode_string)
+assert isinstance(sig_raw, bytes)
+sig_der = sk.sign(data, sigencode=sigencode_der)
+assert isinstance(sig_der, bytes)
+sig_strings = sk.sign(data, sigencode=sigencode_strings)
+assert isinstance(sig_strings[0], bytes)
+
+verifiers = []
+for modifier, fun in [
+ ("bytes", lambda x: x),
+ ("bytes memoryview", lambda x: buffer(x)),
+ ("bytearray", lambda x: bytearray(x)),
+ ("bytearray memoryview", lambda x: buffer(bytearray(x))),
+ ("array.array of bytes", lambda x: array.array("B", x)),
+ ("array.array of bytes memoryview", lambda x: buffer(array.array("B", x))),
+ ("array.array of ints", lambda x: array.array("I", x)),
+ ("array.array of ints memoryview", lambda x: buffer(array.array("I", x))),
+]:
+ if "ints" in modifier:
+ conv = lambda x: x
+ else:
+ conv = fun
+ for sig_format, signature, decoder, mod_apply in [
+ ("raw", sig_raw, sigdecode_string, lambda x: conv(x)),
+ ("der", sig_der, sigdecode_der, lambda x: conv(x)),
+ (
+ "strings",
+ sig_strings,
+ sigdecode_strings,
+ lambda x: tuple(conv(i) for i in x),
+ ),
+ ]:
+ for method_name, vrf_mthd, vrf_data in [
+ ("verify", vk.verify, data),
+ ("verify_digest", vk.verify_digest, data_hash),
+ ]:
+ verifiers.append(
+ pytest.param(
+ signature,
+ decoder,
+ mod_apply,
+ fun,
+ vrf_mthd,
+ vrf_data,
+ id="{2}-{0}-{1}".format(modifier, sig_format, method_name),
+ )
+ )
+
+
+ "signature,decoder,mod_apply,fun,vrf_mthd,vrf_data", verifiers
+)
+def test_VerifyingKey_verify(
+ signature, decoder, mod_apply, fun, vrf_mthd, vrf_data
+):
+ sig = mod_apply(signature)
+
+ assert vrf_mthd(sig, fun(vrf_data), sigdecode=decoder)
+
+
+# test SigningKey.from_string()
+prv_key_bytes = (
+ b"^\xc8B\x0b\xd6\xef\x92R\xa9B\xe9\x89\x04<\xa2"
+ b"\x9fV\x1f\xa5%w\x0e\xb1\xc5"
+)
+assert len(prv_key_bytes) == 24
+converters = []
+for modifier, convert in [
+ ("bytes", lambda x: x),
+ ("bytes memoryview", buffer),
+ ("bytearray", bytearray),
+ ("bytearray memoryview", lambda x: buffer(bytearray(x))),
+ ("array.array of bytes", lambda x: array.array("B", x)),
+ ("array.array of bytes memoryview", lambda x: buffer(array.array("B", x))),
+ ("array.array of ints", lambda x: array.array("I", x)),
+ ("array.array of ints memoryview", lambda x: buffer(array.array("I", x))),
+]:
+ converters.append(pytest.param(convert, id=modifier))
+
+
[email protected]("convert", converters)
+def test_SigningKey_from_string(convert):
+ key = convert(prv_key_bytes)
+ sk = SigningKey.from_string(key)
+
+ assert sk.to_string() == prv_key_bytes
+
+
+# test SigningKey.from_der()
+prv_key_str = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n"
+ "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n"
+ "bA==\n"
+ "-----END EC PRIVATE KEY-----\n"
+)
+key_bytes = unpem(prv_key_str)
+assert isinstance(key_bytes, bytes)
+
+# the last two converters are for array.array of ints; those require input
+# whose length is a multiple of 4, which no curve we support produces
[email protected]("convert", converters[:-2])
+def test_SigningKey_from_der(convert):
+ key = convert(key_bytes)
+ sk = SigningKey.from_der(key)
+
+ assert sk.to_string() == prv_key_bytes
+
+
+# test SigningKey.sign_deterministic()
+extra_entropy = b"\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11"
+
+
[email protected]("convert", converters)
+def test_SigningKey_sign_deterministic(convert):
+ sig = sk.sign_deterministic(
+ convert(data), extra_entropy=convert(extra_entropy)
+ )
+
+ vk.verify(sig, data)
+
+
+# test SigningKey.sign_digest_deterministic()
[email protected]("convert", converters)
+def test_SigningKey_sign_digest_deterministic(convert):
+ sig = sk.sign_digest_deterministic(
+ convert(data_hash), extra_entropy=convert(extra_entropy)
+ )
+
+ vk.verify(sig, data)
+
+
[email protected]("convert", converters)
+def test_SigningKey_sign(convert):
+ sig = sk.sign(convert(data))
+
+ vk.verify(sig, data)
+
+
[email protected]("convert", converters)
+def test_SigningKey_sign_digest(convert):
+ sig = sk.sign_digest(convert(data_hash))
+
+ vk.verify(sig, data)
+
+
+def test_SigningKey_with_unlikely_value():
+ sk = SigningKey.from_secret_exponent(NIST256p.order - 1, curve=NIST256p)
+ vk = sk.verifying_key
+ sig = sk.sign(b"hello")
+ assert vk.verify(sig, b"hello")
diff --git a/frozen_deps/ecdsa/test_malformed_sigs.py b/frozen_deps/ecdsa/test_malformed_sigs.py
new file mode 100644
index 0000000..4895cea
--- /dev/null
+++ b/frozen_deps/ecdsa/test_malformed_sigs.py
@@ -0,0 +1,350 @@
+from __future__ import with_statement, division
+
+import hashlib
+
+try:
+ from hashlib import algorithms_available
+except ImportError: # pragma: no cover
+ algorithms_available = [
+ "md5",
+ "sha1",
+ "sha224",
+ "sha256",
+ "sha384",
+ "sha512",
+ ]
+from functools import partial
+import pytest
+import sys
+from six import binary_type
+import hypothesis.strategies as st
+from hypothesis import note, assume, given, settings, example
+
+from .keys import SigningKey
+from .keys import BadSignatureError
+from .util import sigencode_der, sigencode_string
+from .util import sigdecode_der, sigdecode_string
+from .curves import curves, NIST256p
+from .der import (
+ encode_integer,
+ encode_bitstring,
+ encode_octet_string,
+ encode_oid,
+ encode_sequence,
+ encode_constructed,
+)
+
+
+example_data = b"some data to sign"
+"""Since the data is hashed for processing, really any string will do."""
+
+
+hash_and_size = [
+ (name, hashlib.new(name).digest_size) for name in algorithms_available
+]
+"""Pairs of hash names and their output sizes.
+Needed for pairing with curves as we don't support hashes
+bigger than order sizes of curves."""
+
+
+keys_and_sigs = []
+"""Name of the curve+hash combination, VerifyingKey and DER signature."""
+
+
+# for hypothesis strategy shrinking we want smallest curves and hashes first
+for curve in sorted(curves, key=lambda x: x.baselen):
+ for hash_alg in [
+ name
+ for name, size in sorted(hash_and_size, key=lambda x: x[1])
+ if 0 < size <= curve.baselen
+ ]:
+ sk = SigningKey.generate(
+ curve, hashfunc=partial(hashlib.new, hash_alg)
+ )
+
+ keys_and_sigs.append(
+ (
+ "{0} {1}".format(curve, hash_alg),
+ sk.verifying_key,
+ sk.sign(example_data, sigencode=sigencode_der),
+ )
+ )
+
+
+# first make sure that the signatures can be verified
+ "verifying_key,signature",
+ [pytest.param(vk, sig, id=name) for name, vk, sig in keys_and_sigs],
+)
+def test_signatures(verifying_key, signature):
+ assert verifying_key.verify(
+ signature, example_data, sigdecode=sigdecode_der
+ )
+
+
+@st.composite
+def st_fuzzed_sig(draw, keys_and_sigs):
+ """
+ Hypothesis strategy that generates pairs of VerifyingKey and malformed
+ signatures created by fuzzing of a valid signature.
+ """
+ name, verifying_key, old_sig = draw(st.sampled_from(keys_and_sigs))
+ note("Configuration: {0}".format(name))
+
+ sig = bytearray(old_sig)
+
+ # decide which bytes should be removed
+ to_remove = draw(
+ st.lists(st.integers(min_value=0, max_value=len(sig) - 1), unique=True)
+ )
+ to_remove.sort()
+ for i in reversed(to_remove):
+ del sig[i]
+ note("Remove bytes: {0}".format(to_remove))
+
+ # decide which bytes of the original signature should be changed
+ if sig: # pragma: no branch
+ xors = draw(
+ st.dictionaries(
+ st.integers(min_value=0, max_value=len(sig) - 1),
+ st.integers(min_value=1, max_value=255),
+ )
+ )
+ for i, val in xors.items():
+ sig[i] ^= val
+ note("xors: {0}".format(xors))
+
+ # decide where new data should be inserted
+ insert_pos = draw(st.integers(min_value=0, max_value=len(sig)))
+ # NIST521p signature is about 140 bytes long, test slightly longer
+ insert_data = draw(st.binary(max_size=256))
+
+ sig = sig[:insert_pos] + insert_data + sig[insert_pos:]
+ note(
+ "Inserted at position {0} bytes: {1!r}".format(insert_pos, insert_data)
+ )
+
+ sig = bytes(sig)
+ # make sure that at least one mutation was performed on the data
+ assume(to_remove or xors or insert_data)
+ # and that the mutations didn't cancel each other out
+ assume(sig != old_sig)
+
+ return verifying_key, sig
+
+
+params = {}
+# not supported in hypothesis 2.0.0
+if sys.version_info >= (2, 7): # pragma: no branch
+ from hypothesis import HealthCheck
+
+ # deadline=5s because NIST521p signatures are slow to verify
+ params["deadline"] = 5000
+ params["suppress_health_check"] = [
+ HealthCheck.data_too_large,
+ HealthCheck.filter_too_much,
+ HealthCheck.too_slow,
+ ]
+
+slow_params = dict(params)
+slow_params["max_examples"] = 10
+
+
+@settings(**params)
+@given(st_fuzzed_sig(keys_and_sigs))
+def test_fuzzed_der_signatures(args):
+ verifying_key, sig = args
+
+ with pytest.raises(BadSignatureError):
+ verifying_key.verify(sig, example_data, sigdecode=sigdecode_der)
+
+
+@st.composite
+def st_random_der_ecdsa_sig_value(draw):
+ """
+ Hypothesis strategy for selecting random values and encoding them
+ to ECDSA-Sig-Value object::
+
+ ECDSA-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
+ """
+ name, verifying_key, _ = draw(st.sampled_from(keys_and_sigs))
+ note("Configuration: {0}".format(name))
+ order = int(verifying_key.curve.order)
+
+ # the encode_integer doesn't support negative numbers, would be nice
+ # to generate them too, but we have coverage for remove_integer()
+ # verifying that it doesn't accept them, so meh.
+ # Test all numbers around the ones that can show up (around order)
+ # way smaller and slightly bigger
+ r = draw(
+ st.integers(min_value=0, max_value=order << 4)
+ | st.integers(min_value=order >> 2, max_value=order + 1)
+ )
+ s = draw(
+ st.integers(min_value=0, max_value=order << 4)
+ | st.integers(min_value=order >> 2, max_value=order + 1)
+ )
+
+ sig = encode_sequence(encode_integer(r), encode_integer(s))
+
+ return verifying_key, sig
+
+
+@settings(**slow_params)
+@given(st_random_der_ecdsa_sig_value())
+def test_random_der_ecdsa_sig_value(params):
+ """
+ Check if random values encoded in ECDSA-Sig-Value structure are rejected
+ as signature.
+ """
+ verifying_key, sig = params
+
+ with pytest.raises(BadSignatureError):
+ verifying_key.verify(sig, example_data, sigdecode=sigdecode_der)
+
+
+def st_der_integer(*args, **kwargs):
+ """
+ Hypothesis strategy that returns a random positive integer as DER
+ INTEGER.
+ Parameters are passed to hypothesis.strategy.integer.
+ """
+ if "min_value" not in kwargs: # pragma: no branch
+ kwargs["min_value"] = 0
+ return st.builds(encode_integer, st.integers(*args, **kwargs))
+
+
+@st.composite
+def st_der_bit_string(draw, *args, **kwargs):
+ """
+ Hypothesis strategy that returns a random DER BIT STRING.
+ Parameters are passed to hypothesis.strategy.binary.
+ """
+ data = draw(st.binary(*args, **kwargs))
+ if data:
+ unused = draw(st.integers(min_value=0, max_value=7))
+ data = bytearray(data)
+ data[-1] &= -(2 ** unused)
+ data = bytes(data)
+ else:
+ unused = 0
+ return encode_bitstring(data, unused)
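+
+# For reference, a concrete DER BIT STRING as encode_bitstring should produce
+# it (a sketch; 0x03 is the BIT STRING tag, 0x02 the length, 0x04 the count
+# of unused bits and 0xf0 a payload with those low bits already cleared):
+#
+# assert encode_bitstring(b"\xf0", 4) == b"\x03\x02\x04\xf0"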
+
+
+def st_der_octet_string(*args, **kwargs):
+ """
+ Hypothesis strategy that returns a random DER OCTET STRING object.
+ Parameters are passed to hypothesis.strategy.binary
+ """
+ return st.builds(encode_octet_string, st.binary(*args, **kwargs))
+
+
+def st_der_null():
+ """
+ Hypothesis strategy that returns DER NULL object.
+ """
+ return st.just(b"\x05\x00")
+
+
+@st.composite
+def st_der_oid(draw):
+ """
+ Hypothesis strategy that returns DER OBJECT IDENTIFIER objects.
+ """
+ first = draw(st.integers(min_value=0, max_value=2))
+ if first < 2:
+ second = draw(st.integers(min_value=0, max_value=39))
+ else:
+ second = draw(st.integers(min_value=0, max_value=2 ** 512))
+ rest = draw(
+ st.lists(st.integers(min_value=0, max_value=2 ** 512), max_size=50)
+ )
+ return encode_oid(first, second, *rest)
+
+
+def st_der():
+ """
+ Hypothesis strategy that returns random DER structures.
+
+ A valid DER structure is any primitive object, an octet encoding
+ of a valid DER structure, sequence of valid DER objects or a constructed
+ encoding of any of the above.
+ """
+ return st.recursive(
+ st.just(b"")
+ | st_der_integer(max_value=2 ** 4096)
+ | st_der_bit_string(max_size=1024 ** 2)
+ | st_der_octet_string(max_size=1024 ** 2)
+ | st_der_null()
+ | st_der_oid(),
+ lambda children: st.builds(
+ lambda x: encode_octet_string(x), st.one_of(children)
+ )
+ | st.builds(lambda x: encode_bitstring(x, 0), st.one_of(children))
+ | st.builds(
+ lambda x: encode_sequence(*x), st.lists(children, max_size=200)
+ )
+ | st.builds(
+ lambda tag, x: encode_constructed(tag, x),
+ st.integers(min_value=0, max_value=0x3F),
+ st.one_of(children),
+ ),
+ max_leaves=40,
+ )
+
+
+@settings(**params)
+@given(st.sampled_from(keys_and_sigs), st_der())
+def test_random_der_as_signature(params, der):
+ """Check if random DER structures are rejected as signature"""
+ name, verifying_key, _ = params
+
+ with pytest.raises(BadSignatureError):
+ verifying_key.verify(der, example_data, sigdecode=sigdecode_der)
+
+
+@settings(**params)
+@given(st.sampled_from(keys_and_sigs), st.binary(max_size=1024 ** 2))
+@example(
+ keys_and_sigs[0], encode_sequence(encode_integer(0), encode_integer(0))
+)
+@example(
+ keys_and_sigs[0],
+ encode_sequence(encode_integer(1), encode_integer(1)) + b"\x00",
+)
+@example(keys_and_sigs[0], encode_sequence(*[encode_integer(1)] * 3))
+def test_random_bytes_as_signature(params, der):
+ """Check if random bytes are rejected as signature"""
+ name, verifying_key, _ = params
+
+ with pytest.raises(BadSignatureError):
+ verifying_key.verify(der, example_data, sigdecode=sigdecode_der)
+
+
+keys_and_string_sigs = [
+ (
+ name,
+ verifying_key,
+ sigencode_string(
+ *sigdecode_der(sig, verifying_key.curve.order),
+ order=verifying_key.curve.order
+ ),
+ )
+ for name, verifying_key, sig in keys_and_sigs
+]
+"""
+Name of the curve+hash combination, VerifyingKey and signature as a
+byte string.
+"""
+
+
+@settings(**params)
+@given(st_fuzzed_sig(keys_and_string_sigs))
+def test_fuzzed_string_signatures(params):
+ verifying_key, sig = params
+
+ with pytest.raises(BadSignatureError):
+ verifying_key.verify(sig, example_data, sigdecode=sigdecode_string)
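+
+# The ECDSA-Sig-Value structures generated above are plain DER SEQUENCEs of
+# two INTEGERs; with tiny illustrative values the expected encoding is, as a
+# sketch:
+#
+# sig = encode_sequence(encode_integer(1), encode_integer(2))
+# # sig == b"\x30\x06\x02\x01\x01\x02\x01\x02"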
diff --git a/frozen_deps/ecdsa/test_numbertheory.py b/frozen_deps/ecdsa/test_numbertheory.py
new file mode 100644
index 0000000..4912c57
--- /dev/null
+++ b/frozen_deps/ecdsa/test_numbertheory.py
@@ -0,0 +1,326 @@
+import operator
+from six import print_
+from functools import reduce
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+import hypothesis.strategies as st
+import pytest
+from hypothesis import given, settings, example
+
+try:
+ from hypothesis import HealthCheck
+
+ HC_PRESENT = True
+except ImportError: # pragma: no cover
+ HC_PRESENT = False
+from .numbertheory import (
+ SquareRootError,
+ factorization,
+ gcd,
+ lcm,
+ jacobi,
+ inverse_mod,
+ is_prime,
+ next_prime,
+ smallprimes,
+ square_root_mod_prime,
+)
+
+
+BIGPRIMES = (
+ 999671,
+ 999683,
+ 999721,
+ 999727,
+ 999749,
+ 999763,
+ 999769,
+ 999773,
+ 999809,
+ 999853,
+ 999863,
+ 999883,
+ 999907,
+ 999917,
+ 999931,
+ 999953,
+ 999959,
+ 999961,
+ 999979,
+ 999983,
+)
+
+
+ "prime, next_p", [(p, q) for p, q in zip(BIGPRIMES[:-1], BIGPRIMES[1:])]
+)
+def test_next_prime(prime, next_p):
+ assert next_prime(prime) == next_p
+
+
[email protected]("val", [-1, 0, 1])
+def test_next_prime_with_nums_less_2(val):
+ assert next_prime(val) == 2
+
+
[email protected]("prime", smallprimes)
+def test_square_root_mod_prime_for_small_primes(prime):
+ squares = set()
+ for num in range(0, 1 + prime // 2):
+ sq = num * num % prime
+ squares.add(sq)
+ root = square_root_mod_prime(sq, prime)
+ # tested for real with TestNumbertheory.test_square_root_mod_prime
+ assert root * root % prime == sq
+
+ for nonsquare in range(0, prime):
+ if nonsquare in squares:
+ continue
+ with pytest.raises(SquareRootError):
+ square_root_mod_prime(nonsquare, prime)
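+
+# A single concrete call, as a quick sketch (7 is prime; 2 and 5 are the two
+# square roots of 4 modulo 7, so either return value would be acceptable):
+#
+# assert square_root_mod_prime(4, 7) in (2, 5)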
+
+
+@st.composite
+def st_two_nums_rel_prime(draw):
+ # 521-bit is the biggest curve we operate on, use 1024 for a bit
+ # of breathing space
+ mod = draw(st.integers(min_value=2, max_value=2 ** 1024))
+ num = draw(
+ st.integers(min_value=1, max_value=mod - 1).filter(
+ lambda x: gcd(x, mod) == 1
+ )
+ )
+ return num, mod
+
+
+@st.composite
+def st_primes(draw, *args, **kwargs):
+ if "min_value" not in kwargs: # pragma: no branch
+ kwargs["min_value"] = 1
+ prime = draw(
+ st.sampled_from(smallprimes)
+ | st.integers(*args, **kwargs).filter(is_prime)
+ )
+ return prime
+
+
[email protected]
+def st_num_square_prime(draw):
+ prime = draw(st_primes(max_value=2 ** 1024))
+ num = draw(st.integers(min_value=0, max_value=1 + prime // 2))
+ sq = num * num % prime
+ return sq, prime
+
+
[email protected]
+def st_comp_with_com_fac(draw):
+ """
+ Strategy that returns lists of numbers, all having a common factor.
+ """
+ primes = draw(
+ st.lists(st_primes(max_value=2 ** 512), min_size=1, max_size=10)
+ )
+ # select random prime(s) that will make the common factor of composites
+ com_fac_primes = draw(
+ st.lists(st.sampled_from(primes), min_size=1, max_size=20)
+ )
+ com_fac = reduce(operator.mul, com_fac_primes, 1)
+
+ # select at most 20 lists (returned numbers),
+ # each having at most 30 primes (factors) including none (then the number
+ # will be 1)
+ comp_primes = draw(
+ st.integers(min_value=1, max_value=20).flatmap(
+ lambda n: st.lists(
+ st.lists(st.sampled_from(primes), max_size=30),
+ min_size=1,
+ max_size=n,
+ )
+ )
+ )
+
+ return [reduce(operator.mul, nums, 1) * com_fac for nums in comp_primes]
+
+
[email protected]
+def st_comp_no_com_fac(draw):
+ """
+ Strategy that returns lists of numbers that don't have a common factor.
+ """
+ primes = draw(
+ st.lists(
+ st_primes(max_value=2 ** 512), min_size=2, max_size=10, unique=True
+ )
+ )
+ # first select the primes that will create the uncommon factor
+ # between returned numbers
+ uncom_fac_primes = draw(
+ st.lists(
+ st.sampled_from(primes),
+ min_size=1,
+ max_size=len(primes) - 1,
+ unique=True,
+ )
+ )
+ uncom_fac = reduce(operator.mul, uncom_fac_primes, 1)
+
+ # then build composites from leftover primes
+ leftover_primes = [i for i in primes if i not in uncom_fac_primes]
+
+ assert leftover_primes
+ assert uncom_fac_primes
+
+ # select at most 20 lists, each having at most 30 primes
+ # selected from the leftover_primes list
+ number_primes = draw(
+ st.integers(min_value=1, max_value=20).flatmap(
+ lambda n: st.lists(
+ st.lists(st.sampled_from(leftover_primes), max_size=30),
+ min_size=1,
+ max_size=n,
+ )
+ )
+ )
+
+ numbers = [reduce(operator.mul, nums, 1) for nums in number_primes]
+
+ insert_at = draw(st.integers(min_value=0, max_value=len(numbers)))
+ numbers.insert(insert_at, uncom_fac)
+ return numbers
+
+
+HYP_SETTINGS = {}
+if HC_PRESENT: # pragma: no branch
+ HYP_SETTINGS["suppress_health_check"] = [
+ HealthCheck.filter_too_much,
+ HealthCheck.too_slow,
+ ]
+ # the factorization() sometimes takes a long time to finish
+ HYP_SETTINGS["deadline"] = 5000
+
+
+HYP_SLOW_SETTINGS = dict(HYP_SETTINGS)
+HYP_SLOW_SETTINGS["max_examples"] = 10
+
+
+class TestNumbertheory(unittest.TestCase):
+ def test_gcd(self):
+ assert gcd(3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13) == 3 * 5
+ assert gcd([3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13]) == 3 * 5
+ assert gcd(3) == 3
+
+ @unittest.skipUnless(
+ HC_PRESENT,
+ "Hypothesis 2.0.0 can't be made tolerant of hard to "
+ "meet requirements (like `is_prime()`), the test "
+ "case times-out on it",
+ )
+ @settings(**HYP_SLOW_SETTINGS)
+ @given(st_comp_with_com_fac())
+ def test_gcd_with_com_factor(self, numbers):
+ n = gcd(numbers)
+ assert 1 in numbers or n != 1
+ for i in numbers:
+ assert i % n == 0
+
+ @unittest.skipUnless(
+ HC_PRESENT,
+ "Hypothesis 2.0.0 can't be made tolerant of hard to "
+ "meet requirements (like `is_prime()`), the test "
+ "case times-out on it",
+ )
+ @settings(**HYP_SLOW_SETTINGS)
+ @given(st_comp_no_com_fac())
+ def test_gcd_with_uncom_factor(self, numbers):
+ n = gcd(numbers)
+ assert n == 1
+
+ @given(
+ st.lists(
+ st.integers(min_value=1, max_value=2 ** 8192),
+ min_size=1,
+ max_size=20,
+ )
+ )
+ def test_gcd_with_random_numbers(self, numbers):
+ n = gcd(numbers)
+ for i in numbers:
+ # check that at least it's a divider
+ assert i % n == 0
+
+ def test_lcm(self):
+ assert lcm(3, 5 * 3, 7 * 3) == 3 * 5 * 7
+ assert lcm([3, 5 * 3, 7 * 3]) == 3 * 5 * 7
+ assert lcm(3) == 3
+
+ @given(
+ st.lists(
+ st.integers(min_value=1, max_value=2 ** 8192),
+ min_size=1,
+ max_size=20,
+ )
+ )
+ def test_lcm_with_random_numbers(self, numbers):
+ n = lcm(numbers)
+ for i in numbers:
+ assert n % i == 0
+
+ @unittest.skipUnless(
+ HC_PRESENT,
+ "Hypothesis 2.0.0 can't be made tolerant of hard to "
+ "meet requirements (like `is_prime()`), the test "
+ "case times-out on it",
+ )
+ @settings(**HYP_SETTINGS)
+ @given(st_num_square_prime())
+ def test_square_root_mod_prime(self, vals):
+ square, prime = vals
+
+ calc = square_root_mod_prime(square, prime)
+ assert calc * calc % prime == square
+
+ @settings(**HYP_SETTINGS)
+ @given(st.integers(min_value=1, max_value=10 ** 12))
+ @example(265399 * 1526929)
+ @example(373297 ** 2 * 553991)
+ def test_factorization(self, num):
+ factors = factorization(num)
+ mult = 1
+ for i in factors:
+ mult *= i[0] ** i[1]
+ assert mult == num
+
+ @settings(**HYP_SETTINGS)
+ @given(st.integers(min_value=3, max_value=1000).filter(lambda x: x % 2))
+ def test_jacobi(self, mod):
+ if is_prime(mod):
+ squares = set()
+ for root in range(1, mod):
+ assert jacobi(root * root, mod) == 1
+ squares.add(root * root % mod)
+ for i in range(1, mod):
+ if i not in squares:
+ assert jacobi(i, mod) == -1
+ else:
+ factors = factorization(mod)
+ for a in range(1, mod):
+ c = 1
+ for i in factors:
+ c *= jacobi(a, i[0]) ** i[1]
+ assert c == jacobi(a, mod)
+
+ @given(st_two_nums_rel_prime())
+ def test_inverse_mod(self, nums):
+ num, mod = nums
+
+ inv = inverse_mod(num, mod)
+
+ assert 0 < inv < mod
+ assert num * inv % mod == 1
+
+ def test_inverse_mod_with_zero(self):
+ assert 0 == inverse_mod(0, 11)
diff --git a/frozen_deps/ecdsa/test_pyecdsa.py b/frozen_deps/ecdsa/test_pyecdsa.py
new file mode 100644
index 0000000..65b6716
--- /dev/null
+++ b/frozen_deps/ecdsa/test_pyecdsa.py
@@ -0,0 +1,1989 @@
+from __future__ import with_statement, division
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+import os
+import time
+import shutil
+import subprocess
+import pytest
+from binascii import hexlify, unhexlify
+from hashlib import sha1, sha256, sha384, sha512
+import hashlib
+from functools import partial
+
+from hypothesis import given
+import hypothesis.strategies as st
+
+from six import b, print_, binary_type
+from .keys import SigningKey, VerifyingKey
+from .keys import BadSignatureError, MalformedPointError, BadDigestError
+from . import util
+from .util import sigencode_der, sigencode_strings
+from .util import sigdecode_der, sigdecode_strings
+from .util import number_to_string, encoded_oid_ecPublicKey, MalformedSignature
+from .curves import Curve, UnknownCurveError
+from .curves import (
+ NIST192p,
+ NIST224p,
+ NIST256p,
+ NIST384p,
+ NIST521p,
+ SECP256k1,
+ BRAINPOOLP160r1,
+ BRAINPOOLP192r1,
+ BRAINPOOLP224r1,
+ BRAINPOOLP256r1,
+ BRAINPOOLP320r1,
+ BRAINPOOLP384r1,
+ BRAINPOOLP512r1,
+ curves,
+)
+from .ecdsa import (
+ curve_brainpoolp224r1,
+ curve_brainpoolp256r1,
+ curve_brainpoolp384r1,
+ curve_brainpoolp512r1,
+)
+from .ellipticcurve import Point
+from . import der
+from . import rfc6979
+from . import ecdsa
+
+
+class SubprocessError(Exception):
+ pass
+
+
+def run_openssl(cmd):
+ OPENSSL = "openssl"
+ p = subprocess.Popen(
+ [OPENSSL] + cmd.split(),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ )
+ stdout, ignored = p.communicate()
+ if p.returncode != 0:
+ raise SubprocessError(
+ "cmd '%s %s' failed: rc=%s, stdout/err was %s"
+ % (OPENSSL, cmd, p.returncode, stdout)
+ )
+ return stdout.decode()
+
+
+class ECDSA(unittest.TestCase):
+ def test_basic(self):
+ priv = SigningKey.generate()
+ pub = priv.get_verifying_key()
+
+ data = b("blahblah")
+ sig = priv.sign(data)
+
+ self.assertTrue(pub.verify(sig, data))
+ self.assertRaises(BadSignatureError, pub.verify, sig, data + b("bad"))
+
+ pub2 = VerifyingKey.from_string(pub.to_string())
+ self.assertTrue(pub2.verify(sig, data))
+
+ def test_deterministic(self):
+ data = b("blahblah")
+ secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16)
+
+ priv = SigningKey.from_secret_exponent(secexp, SECP256k1, sha256)
+ pub = priv.get_verifying_key()
+
+ k = rfc6979.generate_k(
+ SECP256k1.generator.order(), secexp, sha256, sha256(data).digest()
+ )
+
+ sig1 = priv.sign(data, k=k)
+ self.assertTrue(pub.verify(sig1, data))
+
+ sig2 = priv.sign(data, k=k)
+ self.assertTrue(pub.verify(sig2, data))
+
+ sig3 = priv.sign_deterministic(data, sha256)
+ self.assertTrue(pub.verify(sig3, data))
+
+ self.assertEqual(sig1, sig2)
+ self.assertEqual(sig1, sig3)
+
+ def test_bad_usage(self):
+ # sk=SigningKey() is wrong
+ self.assertRaises(TypeError, SigningKey)
+ self.assertRaises(TypeError, VerifyingKey)
+
+ def test_lengths(self):
+ default = NIST192p
+ priv = SigningKey.generate()
+ pub = priv.get_verifying_key()
+ self.assertEqual(len(pub.to_string()), default.verifying_key_length)
+ sig = priv.sign(b("data"))
+ self.assertEqual(len(sig), default.signature_length)
+ for curve in (
+ NIST192p,
+ NIST224p,
+ NIST256p,
+ NIST384p,
+ NIST521p,
+ BRAINPOOLP160r1,
+ BRAINPOOLP192r1,
+ BRAINPOOLP224r1,
+ BRAINPOOLP256r1,
+ BRAINPOOLP320r1,
+ BRAINPOOLP384r1,
+ BRAINPOOLP512r1,
+ ):
+ start = time.time()
+ priv = SigningKey.generate(curve=curve)
+ pub1 = priv.get_verifying_key()
+ keygen_time = time.time() - start
+ pub2 = VerifyingKey.from_string(pub1.to_string(), curve)
+ self.assertEqual(pub1.to_string(), pub2.to_string())
+ self.assertEqual(len(pub1.to_string()), curve.verifying_key_length)
+ start = time.time()
+ sig = priv.sign(b("data"))
+ sign_time = time.time() - start
+ self.assertEqual(len(sig), curve.signature_length)
+
+ def test_serialize(self):
+ seed = b("secret")
+ curve = NIST192p
+ secexp1 = util.randrange_from_seed__trytryagain(seed, curve.order)
+ secexp2 = util.randrange_from_seed__trytryagain(seed, curve.order)
+ self.assertEqual(secexp1, secexp2)
+ priv1 = SigningKey.from_secret_exponent(secexp1, curve)
+ priv2 = SigningKey.from_secret_exponent(secexp2, curve)
+ self.assertEqual(
+ hexlify(priv1.to_string()), hexlify(priv2.to_string())
+ )
+ self.assertEqual(priv1.to_pem(), priv2.to_pem())
+ pub1 = priv1.get_verifying_key()
+ pub2 = priv2.get_verifying_key()
+ data = b("data")
+ sig1 = priv1.sign(data)
+ sig2 = priv2.sign(data)
+ self.assertTrue(pub1.verify(sig1, data))
+ self.assertTrue(pub2.verify(sig1, data))
+ self.assertTrue(pub1.verify(sig2, data))
+ self.assertTrue(pub2.verify(sig2, data))
+ self.assertEqual(hexlify(pub1.to_string()), hexlify(pub2.to_string()))
+
+ def test_nonrandom(self):
+ s = b("all the entropy in the entire world, compressed into one line")
+
+ def not_much_entropy(numbytes):
+ return s[:numbytes]
+
+ # we control the entropy source, these two keys should be identical:
+ priv1 = SigningKey.generate(entropy=not_much_entropy)
+ priv2 = SigningKey.generate(entropy=not_much_entropy)
+ self.assertEqual(
+ hexlify(priv1.get_verifying_key().to_string()),
+ hexlify(priv2.get_verifying_key().to_string()),
+ )
+ # likewise, signatures should be identical. Obviously you'd never
+ # want to do this with keys you care about, because the secrecy of
+ # the private key depends upon using different random numbers for
+ # each signature
+ sig1 = priv1.sign(b("data"), entropy=not_much_entropy)
+ sig2 = priv2.sign(b("data"), entropy=not_much_entropy)
+ self.assertEqual(hexlify(sig1), hexlify(sig2))
+
+ def assertTruePrivkeysEqual(self, priv1, priv2):
+ self.assertEqual(
+ priv1.privkey.secret_multiplier, priv2.privkey.secret_multiplier
+ )
+ self.assertEqual(
+ priv1.privkey.public_key.generator,
+ priv2.privkey.public_key.generator,
+ )
+
+ def test_privkey_creation(self):
+ s = b("all the entropy in the entire world, compressed into one line")
+
+ def not_much_entropy(numbytes):
+ return s[:numbytes]
+
+ priv1 = SigningKey.generate()
+ self.assertEqual(priv1.baselen, NIST192p.baselen)
+
+ priv1 = SigningKey.generate(curve=NIST224p)
+ self.assertEqual(priv1.baselen, NIST224p.baselen)
+
+ priv1 = SigningKey.generate(entropy=not_much_entropy)
+ self.assertEqual(priv1.baselen, NIST192p.baselen)
+ priv2 = SigningKey.generate(entropy=not_much_entropy)
+ self.assertEqual(priv2.baselen, NIST192p.baselen)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ priv1 = SigningKey.from_secret_exponent(secexp=3)
+ self.assertEqual(priv1.baselen, NIST192p.baselen)
+ priv2 = SigningKey.from_secret_exponent(secexp=3)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ priv1 = SigningKey.from_secret_exponent(secexp=4, curve=NIST224p)
+ self.assertEqual(priv1.baselen, NIST224p.baselen)
+
+ def test_privkey_strings(self):
+ priv1 = SigningKey.generate()
+ s1 = priv1.to_string()
+ self.assertEqual(type(s1), binary_type)
+ self.assertEqual(len(s1), NIST192p.baselen)
+ priv2 = SigningKey.from_string(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ s1 = priv1.to_pem()
+ self.assertEqual(type(s1), binary_type)
+ self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
+ self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
+ priv2 = SigningKey.from_pem(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ s1 = priv1.to_der()
+ self.assertEqual(type(s1), binary_type)
+ priv2 = SigningKey.from_der(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ priv1 = SigningKey.generate(curve=NIST256p)
+ s1 = priv1.to_pem()
+ self.assertEqual(type(s1), binary_type)
+ self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
+ self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
+ priv2 = SigningKey.from_pem(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ s1 = priv1.to_der()
+ self.assertEqual(type(s1), binary_type)
+ priv2 = SigningKey.from_der(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ def test_privkey_strings_brainpool(self):
+ priv1 = SigningKey.generate(curve=BRAINPOOLP512r1)
+ s1 = priv1.to_pem()
+ self.assertEqual(type(s1), binary_type)
+ self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
+ self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
+ priv2 = SigningKey.from_pem(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ s1 = priv1.to_der()
+ self.assertEqual(type(s1), binary_type)
+ priv2 = SigningKey.from_der(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ def assertTruePubkeysEqual(self, pub1, pub2):
+ self.assertEqual(pub1.pubkey.point, pub2.pubkey.point)
+ self.assertEqual(pub1.pubkey.generator, pub2.pubkey.generator)
+ self.assertEqual(pub1.curve, pub2.curve)
+
+ def test_pubkey_strings(self):
+ priv1 = SigningKey.generate()
+ pub1 = priv1.get_verifying_key()
+ s1 = pub1.to_string()
+ self.assertEqual(type(s1), binary_type)
+ self.assertEqual(len(s1), NIST192p.verifying_key_length)
+ pub2 = VerifyingKey.from_string(s1)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ priv1 = SigningKey.generate(curve=NIST256p)
+ pub1 = priv1.get_verifying_key()
+ s1 = pub1.to_string()
+ self.assertEqual(type(s1), binary_type)
+ self.assertEqual(len(s1), NIST256p.verifying_key_length)
+ pub2 = VerifyingKey.from_string(s1, curve=NIST256p)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ pub1_der = pub1.to_der()
+ self.assertEqual(type(pub1_der), binary_type)
+ pub2 = VerifyingKey.from_der(pub1_der)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ self.assertRaises(
+ der.UnexpectedDER, VerifyingKey.from_der, pub1_der + b("junk")
+ )
+ badpub = VerifyingKey.from_der(pub1_der)
+
+ class FakeGenerator:
+ def order(self):
+ return 123456789
+
+ badcurve = Curve(
+ "unknown", None, FakeGenerator(), (1, 2, 3, 4, 5, 6), None
+ )
+ badpub.curve = badcurve
+ badder = badpub.to_der()
+ self.assertRaises(UnknownCurveError, VerifyingKey.from_der, badder)
+
+ pem = pub1.to_pem()
+ self.assertEqual(type(pem), binary_type)
+ self.assertTrue(pem.startswith(b("-----BEGIN PUBLIC KEY-----")), pem)
+ self.assertTrue(
+ pem.strip().endswith(b("-----END PUBLIC KEY-----")), pem
+ )
+ pub2 = VerifyingKey.from_pem(pem)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ def test_pubkey_strings_brainpool(self):
+ priv1 = SigningKey.generate(curve=BRAINPOOLP512r1)
+ pub1 = priv1.get_verifying_key()
+ s1 = pub1.to_string()
+ self.assertEqual(type(s1), binary_type)
+ self.assertEqual(len(s1), BRAINPOOLP512r1.verifying_key_length)
+ pub2 = VerifyingKey.from_string(s1, curve=BRAINPOOLP512r1)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ pub1_der = pub1.to_der()
+ self.assertEqual(type(pub1_der), binary_type)
+ pub2 = VerifyingKey.from_der(pub1_der)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ def test_vk_to_der_with_invalid_point_encoding(self):
+ sk = SigningKey.generate()
+ vk = sk.verifying_key
+
+ with self.assertRaises(ValueError):
+ vk.to_der("raw")
+
+ def test_sk_to_der_with_invalid_point_encoding(self):
+ sk = SigningKey.generate()
+
+ with self.assertRaises(ValueError):
+ sk.to_der("raw")
+
+ def test_vk_from_der_garbage_after_curve_oid(self):
+ type_oid_der = encoded_oid_ecPublicKey
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + b(
+ "garbage"
+ )
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b"\x00\xff", None)
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ VerifyingKey.from_der(to_decode)
+
+ def test_vk_from_der_invalid_key_type(self):
+ type_oid_der = der.encode_oid(*(1, 2, 3))
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b"\x00\xff", None)
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ VerifyingKey.from_der(to_decode)
+
+ def test_vk_from_der_garbage_after_point_string(self):
+ type_oid_der = encoded_oid_ecPublicKey
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b"\x00\xff", None) + b("garbage")
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ VerifyingKey.from_der(to_decode)
+
+ def test_vk_from_der_invalid_bitstring(self):
+ type_oid_der = encoded_oid_ecPublicKey
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b"\x08\xff", None)
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ VerifyingKey.from_der(to_decode)
+
+ def test_vk_from_der_with_invalid_length_of_encoding(self):
+ type_oid_der = encoded_oid_ecPublicKey
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b"\xff" * 64, 0)
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_der(to_decode)
+
+ def test_vk_from_der_with_raw_encoding(self):
+ type_oid_der = encoded_oid_ecPublicKey
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b"\xff" * 48, 0)
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ VerifyingKey.from_der(to_decode)
+
+ def test_signature_strings(self):
+ priv1 = SigningKey.generate()
+ pub1 = priv1.get_verifying_key()
+ data = b("data")
+
+ sig = priv1.sign(data)
+ self.assertEqual(type(sig), binary_type)
+ self.assertEqual(len(sig), NIST192p.signature_length)
+ self.assertTrue(pub1.verify(sig, data))
+
+ sig = priv1.sign(data, sigencode=sigencode_strings)
+ self.assertEqual(type(sig), tuple)
+ self.assertEqual(len(sig), 2)
+ self.assertEqual(type(sig[0]), binary_type)
+ self.assertEqual(type(sig[1]), binary_type)
+ self.assertEqual(len(sig[0]), NIST192p.baselen)
+ self.assertEqual(len(sig[1]), NIST192p.baselen)
+ self.assertTrue(pub1.verify(sig, data, sigdecode=sigdecode_strings))
+
+ sig_der = priv1.sign(data, sigencode=sigencode_der)
+ self.assertEqual(type(sig_der), binary_type)
+ self.assertTrue(pub1.verify(sig_der, data, sigdecode=sigdecode_der))
+
+ def test_sig_decode_strings_with_invalid_count(self):
+ with self.assertRaises(MalformedSignature):
+ sigdecode_strings([b("one"), b("two"), b("three")], 0xFF)
+
+ def test_sig_decode_strings_with_wrong_r_len(self):
+ with self.assertRaises(MalformedSignature):
+ sigdecode_strings([b("one"), b("two")], 0xFF)
+
+ def test_sig_decode_strings_with_wrong_s_len(self):
+ with self.assertRaises(MalformedSignature):
+ sigdecode_strings([b("\xa0"), b("\xb0\xff")], 0xFF)
+
+ def test_verify_with_too_long_input(self):
+ sk = SigningKey.generate()
+ vk = sk.verifying_key
+
+ with self.assertRaises(BadDigestError):
+ vk.verify_digest(None, b("\x00") * 128)
+
+ def test_sk_from_secret_exponent_with_wrong_sec_exponent(self):
+ with self.assertRaises(MalformedPointError):
+ SigningKey.from_secret_exponent(0)
+
+ def test_sk_from_string_with_wrong_len_string(self):
+ with self.assertRaises(MalformedPointError):
+ SigningKey.from_string(b("\x01"))
+
+ def test_sk_from_der_with_junk_after_sequence(self):
+ ver_der = der.encode_integer(1)
+ to_decode = der.encode_sequence(ver_der) + b("garbage")
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sk_from_der_with_wrong_version(self):
+ ver_der = der.encode_integer(0)
+ to_decode = der.encode_sequence(ver_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sk_from_der_invalid_const_tag(self):
+ ver_der = der.encode_integer(1)
+ privkey_der = der.encode_octet_string(b("\x00\xff"))
+ curve_oid_der = der.encode_oid(*(1, 2, 3))
+ const_der = der.encode_constructed(1, curve_oid_der)
+ to_decode = der.encode_sequence(
+ ver_der, privkey_der, const_der, curve_oid_der
+ )
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sk_from_der_garbage_after_privkey_oid(self):
+ ver_der = der.encode_integer(1)
+ privkey_der = der.encode_octet_string(b("\x00\xff"))
+ curve_oid_der = der.encode_oid(*(1, 2, 3)) + b("garbage")
+ const_der = der.encode_constructed(0, curve_oid_der)
+ to_decode = der.encode_sequence(
+ ver_der, privkey_der, const_der, curve_oid_der
+ )
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sk_from_der_with_short_privkey(self):
+ ver_der = der.encode_integer(1)
+ privkey_der = der.encode_octet_string(b("\x00\xff"))
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ const_der = der.encode_constructed(0, curve_oid_der)
+ to_decode = der.encode_sequence(
+ ver_der, privkey_der, const_der, curve_oid_der
+ )
+
+ sk = SigningKey.from_der(to_decode)
+ self.assertEqual(sk.privkey.secret_multiplier, 255)
+
+ def test_sk_from_p8_der_with_wrong_version(self):
+ ver_der = der.encode_integer(2)
+ algorithm_der = der.encode_sequence(
+ der.encode_oid(1, 2, 840, 10045, 2, 1),
+ der.encode_oid(1, 2, 840, 10045, 3, 1, 1),
+ )
+ privkey_der = der.encode_octet_string(
+ der.encode_sequence(
+ der.encode_integer(1), der.encode_octet_string(b"\x00\xff")
+ )
+ )
+ to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sk_from_p8_der_with_wrong_algorithm(self):
+ ver_der = der.encode_integer(1)
+ algorithm_der = der.encode_sequence(
+ der.encode_oid(1, 2, 3), der.encode_oid(1, 2, 840, 10045, 3, 1, 1)
+ )
+ privkey_der = der.encode_octet_string(
+ der.encode_sequence(
+ der.encode_integer(1), der.encode_octet_string(b"\x00\xff")
+ )
+ )
+ to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sk_from_p8_der_with_trailing_junk_after_algorithm(self):
+ ver_der = der.encode_integer(1)
+ algorithm_der = der.encode_sequence(
+ der.encode_oid(1, 2, 840, 10045, 2, 1),
+ der.encode_oid(1, 2, 840, 10045, 3, 1, 1),
+ der.encode_octet_string(b"junk"),
+ )
+ privkey_der = der.encode_octet_string(
+ der.encode_sequence(
+ der.encode_integer(1), der.encode_octet_string(b"\x00\xff")
+ )
+ )
+ to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sk_from_p8_der_with_trailing_junk_after_key(self):
+ ver_der = der.encode_integer(1)
+ algorithm_der = der.encode_sequence(
+ der.encode_oid(1, 2, 840, 10045, 2, 1),
+ der.encode_oid(1, 2, 840, 10045, 3, 1, 1),
+ )
+ privkey_der = der.encode_octet_string(
+ der.encode_sequence(
+ der.encode_integer(1), der.encode_octet_string(b"\x00\xff")
+ )
+ + der.encode_integer(999)
+ )
+ to_decode = der.encode_sequence(
+ ver_der,
+ algorithm_der,
+ privkey_der,
+ der.encode_octet_string(b"junk"),
+ )
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sign_with_too_long_hash(self):
+ sk = SigningKey.from_secret_exponent(12)
+
+ with self.assertRaises(BadDigestError):
+ sk.sign_digest(b("\xff") * 64)
+
+ def test_hashfunc(self):
+ sk = SigningKey.generate(curve=NIST256p, hashfunc=sha256)
+ data = b("security level is 128 bits")
+ sig = sk.sign(data)
+ vk = VerifyingKey.from_string(
+ sk.get_verifying_key().to_string(), curve=NIST256p, hashfunc=sha256
+ )
+ self.assertTrue(vk.verify(sig, data))
+
+ sk2 = SigningKey.generate(curve=NIST256p)
+ sig2 = sk2.sign(data, hashfunc=sha256)
+ vk2 = VerifyingKey.from_string(
+ sk2.get_verifying_key().to_string(),
+ curve=NIST256p,
+ hashfunc=sha256,
+ )
+ self.assertTrue(vk2.verify(sig2, data))
+
+ vk3 = VerifyingKey.from_string(
+ sk.get_verifying_key().to_string(), curve=NIST256p
+ )
+ self.assertTrue(vk3.verify(sig, data, hashfunc=sha256))
+
+ def test_public_key_recovery(self):
+ # Create keys
+ curve = NIST256p
+
+ sk = SigningKey.generate(curve=curve)
+ vk = sk.get_verifying_key()
+
+ # Sign a message
+ data = b("blahblah")
+ signature = sk.sign(data)
+
+ # Recover verifying keys
+ recovered_vks = VerifyingKey.from_public_key_recovery(
+ signature, data, curve
+ )
+
+ # Test if each pk is valid
+ for recovered_vk in recovered_vks:
+ # Test if recovered vk is valid for the data
+ self.assertTrue(recovered_vk.verify(signature, data))
+
+ # Test if properties are equal
+ self.assertEqual(vk.curve, recovered_vk.curve)
+ self.assertEqual(
+ vk.default_hashfunc, recovered_vk.default_hashfunc
+ )
+
+ # Test if original vk is the list of recovered keys
+ self.assertTrue(
+ vk.pubkey.point
+ in [recovered_vk.pubkey.point for recovered_vk in recovered_vks]
+ )
+
+ def test_public_key_recovery_with_custom_hash(self):
+ # Create keys
+ curve = NIST256p
+
+ sk = SigningKey.generate(curve=curve, hashfunc=sha256)
+ vk = sk.get_verifying_key()
+
+ # Sign a message
+ data = b("blahblah")
+ signature = sk.sign(data)
+
+ # Recover verifying keys
+ recovered_vks = VerifyingKey.from_public_key_recovery(
+ signature, data, curve, hashfunc=sha256
+ )
+
+ # Test if each pk is valid
+ for recovered_vk in recovered_vks:
+ # Test if recovered vk is valid for the data
+ self.assertTrue(recovered_vk.verify(signature, data))
+
+ # Test if properties are equal
+ self.assertEqual(vk.curve, recovered_vk.curve)
+ self.assertEqual(sha256, recovered_vk.default_hashfunc)
+
+ # Test if original vk is the list of recovered keys
+ self.assertTrue(
+ vk.pubkey.point
+ in [recovered_vk.pubkey.point for recovered_vk in recovered_vks]
+ )
+
+ def test_encoding(self):
+ sk = SigningKey.from_secret_exponent(123456789)
+ vk = sk.verifying_key
+
+ exp = b(
+ "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3"
+ "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4"
+ "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*"
+ )
+ self.assertEqual(vk.to_string(), exp)
+ self.assertEqual(vk.to_string("raw"), exp)
+ self.assertEqual(vk.to_string("uncompressed"), b("\x04") + exp)
+ self.assertEqual(vk.to_string("compressed"), b("\x02") + exp[:24])
+ self.assertEqual(vk.to_string("hybrid"), b("\x06") + exp)
+
+ def test_decoding(self):
+ sk = SigningKey.from_secret_exponent(123456789)
+ vk = sk.verifying_key
+
+ enc = b(
+ "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3"
+ "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4"
+ "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*"
+ )
+
+ from_raw = VerifyingKey.from_string(enc)
+ self.assertEqual(from_raw.pubkey.point, vk.pubkey.point)
+
+ from_uncompressed = VerifyingKey.from_string(b("\x04") + enc)
+ self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point)
+
+ from_compressed = VerifyingKey.from_string(b("\x02") + enc[:24])
+ self.assertEqual(from_compressed.pubkey.point, vk.pubkey.point)
+
+ from_uncompressed = VerifyingKey.from_string(b("\x06") + enc)
+ self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point)
+
+ def test_decoding_with_malformed_uncompressed(self):
+ enc = b(
+ "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3"
+ "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4"
+ "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*"
+ )
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b("\x02") + enc)
+
+ def test_decoding_with_malformed_compressed(self):
+ enc = b(
+ "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3"
+ "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4"
+ "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*"
+ )
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b("\x01") + enc[:24])
+
+ def test_decoding_with_inconsistent_hybrid(self):
+ enc = b(
+ "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3"
+ "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4"
+ "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*"
+ )
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b("\x07") + enc)
+
+ def test_decoding_with_point_not_on_curve(self):
+ enc = b(
+ "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3"
+ "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4"
+ "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*"
+ )
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(enc[:47] + b("\x00"))
+
+ def test_decoding_with_point_at_infinity(self):
+ # decoding it is unsupported, as it's not necessary to encode it
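+        # (in SEC 1 the point at infinity is encoded as the single octet 0x00,
+        # which is exactly what this test feeds in)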
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b("\x00"))
+
+ def test_not_lying_on_curve(self):
+ enc = number_to_string(NIST192p.curve.p(), NIST192p.curve.p() + 1)
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b("\x02") + enc)
+
+ def test_from_string_with_invalid_curve_too_short_ver_key_len(self):
+ # both verifying_key_length and baselen are calculated internally
+ # by the Curve constructor, but since we depend on them verify
+ # that inconsistent values are detected
+ curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2))
+ curve.verifying_key_length = 16
+ curve.baselen = 32
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b("\x00") * 16, curve)
+
+ def test_from_string_with_invalid_curve_too_long_ver_key_len(self):
+ # both verifying_key_length and baselen are calculated internally
+ # by the Curve constructor, but since we depend on them verify
+ # that inconsistent values are detected
+ curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2))
+ curve.verifying_key_length = 16
+ curve.baselen = 16
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b("\x00") * 16, curve)
+
+
+ "val,even", [(i, j) for i in range(256) for j in [True, False]]
+)
+def test_VerifyingKey_decode_with_small_values(val, even):
+ enc = number_to_string(val, NIST192p.order)
+
+ if even:
+ enc = b("\x02") + enc
+ else:
+ enc = b("\x03") + enc
+
+    # small values may or may not be valid public keys; verify that only the
+    # expected exception is raised when they are not
+ try:
+ vk = VerifyingKey.from_string(enc)
+ assert isinstance(vk, VerifyingKey)
+ except MalformedPointError:
+ assert True
+
+
+params = []
+for curve in curves:
+ for enc in ["raw", "uncompressed", "compressed", "hybrid"]:
+ params.append(
+ pytest.param(curve, enc, id="{0}-{1}".format(curve.name, enc))
+ )
+
+
[email protected]("curve,encoding", params)
+def test_VerifyingKey_encode_decode(curve, encoding):
+ sk = SigningKey.generate(curve=curve)
+ vk = sk.verifying_key
+
+ encoded = vk.to_string(encoding)
+
+ from_enc = VerifyingKey.from_string(encoded, curve=curve)
+
+ assert vk.pubkey.point == from_enc.pubkey.point
+
+
+class OpenSSL(unittest.TestCase):
+ # test interoperability with OpenSSL tools. Note that openssl's ECDSA
+ # sign/verify arguments changed between 0.9.8 and 1.0.0: the early
+ # versions require "-ecdsa-with-SHA1", the later versions want just
+ # "-SHA1" (or to leave out that argument entirely, which means the
+ # signature will use some default digest algorithm, probably determined
+ # by the key, probably always SHA1).
+ #
+ # openssl ecparam -name secp224r1 -genkey -out privkey.pem
+ # openssl ec -in privkey.pem -text -noout # get the priv/pub keys
+ # openssl dgst -ecdsa-with-SHA1 -sign privkey.pem -out data.sig data.txt
+ # openssl asn1parse -in data.sig -inform DER
+ # data.sig is 64 bytes, probably 56b plus ASN1 overhead
+ # openssl dgst -ecdsa-with-SHA1 -prverify privkey.pem -signature data.sig data.txt ; echo $?
+ # openssl ec -in privkey.pem -pubout -out pubkey.pem
+ # openssl ec -in privkey.pem -pubout -outform DER -out pubkey.der
+
+ OPENSSL_SUPPORTED_CURVES = set(
+ c.split(":")[0].strip()
+ for c in run_openssl("ecparam -list_curves").split("\n")
+ )
+
+ def get_openssl_messagedigest_arg(self, hash_name):
+ v = run_openssl("version")
+ # e.g. "OpenSSL 1.0.0 29 Mar 2010", or "OpenSSL 1.0.0a 1 Jun 2010",
+ # or "OpenSSL 0.9.8o 01 Jun 2010"
+ vs = v.split()[1].split(".")
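+        # (Python compares the lists element-wise, so e.g. ["0", "9", "8o"]
+        # sorts before ["1", "0", "0"])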
+ if vs >= ["1", "0", "0"]: # pragma: no cover
+ return "-{0}".format(hash_name)
+ else: # pragma: no cover
+ return "-ecdsa-with-{0}".format(hash_name)
+
+ # sk: 1:OpenSSL->python 2:python->OpenSSL
+ # vk: 3:OpenSSL->python 4:python->OpenSSL
+ # sig: 5:OpenSSL->python 6:python->OpenSSL
+
+ @pytest.mark.skipif(
+ "prime192v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime192v1",
+ )
+ def test_from_openssl_nist192p(self):
+ return self.do_test_from_openssl(NIST192p)
+
+ @pytest.mark.skipif(
+ "prime192v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime192v1",
+ )
+ def test_from_openssl_nist192p_sha256(self):
+ return self.do_test_from_openssl(NIST192p, "SHA256")
+
+ @pytest.mark.skipif(
+ "secp224r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp224r1",
+ )
+ def test_from_openssl_nist224p(self):
+ return self.do_test_from_openssl(NIST224p)
+
+ @pytest.mark.skipif(
+ "prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1",
+ )
+ def test_from_openssl_nist256p(self):
+ return self.do_test_from_openssl(NIST256p)
+
+ @pytest.mark.skipif(
+ "prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1",
+ )
+ def test_from_openssl_nist256p_sha384(self):
+ return self.do_test_from_openssl(NIST256p, "SHA384")
+
+ @pytest.mark.skipif(
+ "prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1",
+ )
+ def test_from_openssl_nist256p_sha512(self):
+ return self.do_test_from_openssl(NIST256p, "SHA512")
+
+ @pytest.mark.skipif(
+ "secp384r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp384r1",
+ )
+ def test_from_openssl_nist384p(self):
+ return self.do_test_from_openssl(NIST384p)
+
+ @pytest.mark.skipif(
+ "secp521r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp521r1",
+ )
+ def test_from_openssl_nist521p(self):
+ return self.do_test_from_openssl(NIST521p)
+
+ @pytest.mark.skipif(
+ "secp256k1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp256k1",
+ )
+ def test_from_openssl_secp256k1(self):
+ return self.do_test_from_openssl(SECP256k1)
+
+ @pytest.mark.skipif(
+ "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP160r1",
+ )
+ def test_from_openssl_brainpoolp160r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP160r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP192r1",
+ )
+ def test_from_openssl_brainpoolp192r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP192r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP224r1",
+ )
+ def test_from_openssl_brainpoolp224r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP224r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP256r1",
+ )
+ def test_from_openssl_brainpoolp256r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP256r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP320r1",
+ )
+ def test_from_openssl_brainpoolp320r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP320r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP384r1",
+ )
+ def test_from_openssl_brainpoolp384r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP384r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP512r1",
+ )
+ def test_from_openssl_brainpoolp512r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP512r1)
+
+ def do_test_from_openssl(self, curve, hash_name="SHA1"):
+ curvename = curve.openssl_name
+ assert curvename
+ # OpenSSL: create sk, vk, sign.
+ # Python: read vk(3), checksig(5), read sk(1), sign, check
+ mdarg = self.get_openssl_messagedigest_arg(hash_name)
+ if os.path.isdir("t"): # pragma: no cover
+ shutil.rmtree("t")
+ os.mkdir("t")
+ run_openssl("ecparam -name %s -genkey -out t/privkey.pem" % curvename)
+ run_openssl("ec -in t/privkey.pem -pubout -out t/pubkey.pem")
+ data = b("data")
+ with open("t/data.txt", "wb") as e:
+ e.write(data)
+ run_openssl(
+ "dgst %s -sign t/privkey.pem -out t/data.sig t/data.txt" % mdarg
+ )
+ run_openssl(
+ "dgst %s -verify t/pubkey.pem -signature t/data.sig t/data.txt"
+ % mdarg
+ )
+ with open("t/pubkey.pem", "rb") as e:
+ pubkey_pem = e.read()
+ vk = VerifyingKey.from_pem(pubkey_pem) # 3
+ with open("t/data.sig", "rb") as e:
+ sig_der = e.read()
+ self.assertTrue(
+ vk.verify(
+ sig_der,
+ data, # 5
+ hashfunc=partial(hashlib.new, hash_name),
+ sigdecode=sigdecode_der,
+ )
+ )
+
+ with open("t/privkey.pem") as e:
+ fp = e.read()
+ sk = SigningKey.from_pem(fp) # 1
+ sig = sk.sign(data, hashfunc=partial(hashlib.new, hash_name))
+ self.assertTrue(
+ vk.verify(sig, data, hashfunc=partial(hashlib.new, hash_name))
+ )
+
+ run_openssl(
+ "pkcs8 -topk8 -nocrypt "
+ "-in t/privkey.pem -outform pem -out t/privkey-p8.pem"
+ )
+ with open("t/privkey-p8.pem", "rb") as e:
+ privkey_p8_pem = e.read()
+ sk_from_p8 = SigningKey.from_pem(privkey_p8_pem)
+ self.assertEqual(sk, sk_from_p8)
+
+ @pytest.mark.skipif(
+ "prime192v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime192v1",
+ )
+ def test_to_openssl_nist192p(self):
+ self.do_test_to_openssl(NIST192p)
+
+ @pytest.mark.skipif(
+ "prime192v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime192v1",
+ )
+ def test_to_openssl_nist192p_sha256(self):
+ self.do_test_to_openssl(NIST192p, "SHA256")
+
+ @pytest.mark.skipif(
+ "secp224r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp224r1",
+ )
+ def test_to_openssl_nist224p(self):
+ self.do_test_to_openssl(NIST224p)
+
+ @pytest.mark.skipif(
+ "prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1",
+ )
+ def test_to_openssl_nist256p(self):
+ self.do_test_to_openssl(NIST256p)
+
+ @pytest.mark.skipif(
+ "prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1",
+ )
+ def test_to_openssl_nist256p_sha384(self):
+ self.do_test_to_openssl(NIST256p, "SHA384")
+
+ @pytest.mark.skipif(
+ "prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1",
+ )
+ def test_to_openssl_nist256p_sha512(self):
+ self.do_test_to_openssl(NIST256p, "SHA512")
+
+ @pytest.mark.skipif(
+ "secp384r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp384r1",
+ )
+ def test_to_openssl_nist384p(self):
+ self.do_test_to_openssl(NIST384p)
+
+ @pytest.mark.skipif(
+ "secp521r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp521r1",
+ )
+ def test_to_openssl_nist521p(self):
+ self.do_test_to_openssl(NIST521p)
+
+ @pytest.mark.skipif(
+ "secp256k1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp256k1",
+ )
+ def test_to_openssl_secp256k1(self):
+ self.do_test_to_openssl(SECP256k1)
+
+ @pytest.mark.skipif(
+ "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP160r1",
+ )
+ def test_to_openssl_brainpoolp160r1(self):
+ self.do_test_to_openssl(BRAINPOOLP160r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP192r1",
+ )
+ def test_to_openssl_brainpoolp192r1(self):
+ self.do_test_to_openssl(BRAINPOOLP192r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP224r1",
+ )
+ def test_to_openssl_brainpoolp224r1(self):
+ self.do_test_to_openssl(BRAINPOOLP224r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP256r1",
+ )
+ def test_to_openssl_brainpoolp256r1(self):
+ self.do_test_to_openssl(BRAINPOOLP256r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP320r1",
+ )
+ def test_to_openssl_brainpoolp320r1(self):
+ self.do_test_to_openssl(BRAINPOOLP320r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP384r1",
+ )
+ def test_to_openssl_brainpoolp384r1(self):
+ self.do_test_to_openssl(BRAINPOOLP384r1)
+
+ @pytest.mark.skipif(
+ "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP512r1",
+ )
+ def test_to_openssl_brainpoolp512r1(self):
+ self.do_test_to_openssl(BRAINPOOLP512r1)
+
+ def do_test_to_openssl(self, curve, hash_name="SHA1"):
+ curvename = curve.openssl_name
+ assert curvename
+ # Python: create sk, vk, sign.
+ # OpenSSL: read vk(4), checksig(6), read sk(2), sign, check
+ mdarg = self.get_openssl_messagedigest_arg(hash_name)
+ if os.path.isdir("t"): # pragma: no cover
+ shutil.rmtree("t")
+ os.mkdir("t")
+ sk = SigningKey.generate(curve=curve)
+ vk = sk.get_verifying_key()
+ data = b("data")
+ with open("t/pubkey.der", "wb") as e:
+ e.write(vk.to_der()) # 4
+ with open("t/pubkey.pem", "wb") as e:
+ e.write(vk.to_pem()) # 4
+ sig_der = sk.sign(
+ data,
+ hashfunc=partial(hashlib.new, hash_name),
+ sigencode=sigencode_der,
+ )
+
+ with open("t/data.sig", "wb") as e:
+ e.write(sig_der) # 6
+ with open("t/data.txt", "wb") as e:
+ e.write(data)
+ with open("t/baddata.txt", "wb") as e:
+ e.write(data + b("corrupt"))
+
+ self.assertRaises(
+ SubprocessError,
+ run_openssl,
+ "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/baddata.txt"
+ % mdarg,
+ )
+ run_openssl(
+ "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/data.txt"
+ % mdarg
+ )
+
+ with open("t/privkey.pem", "wb") as e:
+ e.write(sk.to_pem()) # 2
+ run_openssl(
+ "dgst %s -sign t/privkey.pem -out t/data.sig2 t/data.txt" % mdarg
+ )
+ run_openssl(
+ "dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt"
+ % mdarg
+ )
+
+ with open("t/privkey-p8.pem", "wb") as e:
+ e.write(sk.to_pem(format="pkcs8"))
+ run_openssl(
+ "dgst %s -sign t/privkey-p8.pem -out t/data.sig3 t/data.txt"
+ % mdarg
+ )
+ run_openssl(
+ "dgst %s -verify t/pubkey.pem -signature t/data.sig3 t/data.txt"
+ % mdarg
+ )
+
+
+class TooSmallCurve(unittest.TestCase):
+ OPENSSL_SUPPORTED_CURVES = set(
+ c.split(":")[0].strip()
+ for c in run_openssl("ecparam -list_curves").split("\n")
+ )
+
+ @pytest.mark.skipif(
+ "prime192v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime192v1",
+ )
+ def test_sign_too_small_curve_dont_allow_truncate_raises(self):
+ sk = SigningKey.generate(curve=NIST192p)
+ vk = sk.get_verifying_key()
+ data = b("data")
+ with self.assertRaises(BadDigestError):
+ sk.sign(
+ data,
+ hashfunc=partial(hashlib.new, "SHA256"),
+ sigencode=sigencode_der,
+ allow_truncate=False,
+ )
+
+ @pytest.mark.skipif(
+ "prime192v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime192v1",
+ )
+ def test_verify_too_small_curve_dont_allow_truncate_raises(self):
+ sk = SigningKey.generate(curve=NIST192p)
+ vk = sk.get_verifying_key()
+ data = b("data")
+ sig_der = sk.sign(
+ data,
+ hashfunc=partial(hashlib.new, "SHA256"),
+ sigencode=sigencode_der,
+ allow_truncate=True,
+ )
+ with self.assertRaises(BadDigestError):
+ vk.verify(
+ sig_der,
+ data,
+ hashfunc=partial(hashlib.new, "SHA256"),
+ sigdecode=sigdecode_der,
+ allow_truncate=False,
+ )
+
+
+class DER(unittest.TestCase):
+ def test_integer(self):
+ self.assertEqual(der.encode_integer(0), b("\x02\x01\x00"))
+ self.assertEqual(der.encode_integer(1), b("\x02\x01\x01"))
+ self.assertEqual(der.encode_integer(127), b("\x02\x01\x7f"))
+ self.assertEqual(der.encode_integer(128), b("\x02\x02\x00\x80"))
+ self.assertEqual(der.encode_integer(256), b("\x02\x02\x01\x00"))
+ # self.assertEqual(der.encode_integer(-1), b("\x02\x01\xff"))
+
+ def s(n):
+ return der.remove_integer(der.encode_integer(n) + b("junk"))
+
+ self.assertEqual(s(0), (0, b("junk")))
+ self.assertEqual(s(1), (1, b("junk")))
+ self.assertEqual(s(127), (127, b("junk")))
+ self.assertEqual(s(128), (128, b("junk")))
+ self.assertEqual(s(256), (256, b("junk")))
+ self.assertEqual(
+ s(1234567890123456789012345678901234567890),
+ (1234567890123456789012345678901234567890, b("junk")),
+ )
+
+ def test_number(self):
+ self.assertEqual(der.encode_number(0), b("\x00"))
+ self.assertEqual(der.encode_number(127), b("\x7f"))
+ self.assertEqual(der.encode_number(128), b("\x81\x00"))
+ self.assertEqual(der.encode_number(3 * 128 + 7), b("\x83\x07"))
+ # self.assertEqual(der.read_number("\x81\x9b" + "more"), (155, 2))
+ # self.assertEqual(der.encode_number(155), b("\x81\x9b"))
+ for n in (0, 1, 2, 127, 128, 3 * 128 + 7, 840, 10045): # , 155):
+ x = der.encode_number(n) + b("more")
+ n1, llen = der.read_number(x)
+ self.assertEqual(n1, n)
+ self.assertEqual(x[llen:], b("more"))
+
+ def test_length(self):
+ self.assertEqual(der.encode_length(0), b("\x00"))
+ self.assertEqual(der.encode_length(127), b("\x7f"))
+ self.assertEqual(der.encode_length(128), b("\x81\x80"))
+ self.assertEqual(der.encode_length(255), b("\x81\xff"))
+ self.assertEqual(der.encode_length(256), b("\x82\x01\x00"))
+ self.assertEqual(der.encode_length(3 * 256 + 7), b("\x82\x03\x07"))
+ self.assertEqual(der.read_length(b("\x81\x9b") + b("more")), (155, 2))
+ self.assertEqual(der.encode_length(155), b("\x81\x9b"))
+ for n in (0, 1, 2, 127, 128, 255, 256, 3 * 256 + 7, 155):
+ x = der.encode_length(n) + b("more")
+ n1, llen = der.read_length(x)
+ self.assertEqual(n1, n)
+ self.assertEqual(x[llen:], b("more"))
+
+ def test_sequence(self):
+ x = der.encode_sequence(b("ABC"), b("DEF")) + b("GHI")
+ self.assertEqual(x, b("\x30\x06ABCDEFGHI"))
+ x1, rest = der.remove_sequence(x)
+ self.assertEqual(x1, b("ABCDEF"))
+ self.assertEqual(rest, b("GHI"))
+
+ def test_constructed(self):
+ x = der.encode_constructed(0, NIST224p.encoded_oid)
+ self.assertEqual(hexlify(x), b("a007") + b("06052b81040021"))
+ x = der.encode_constructed(1, unhexlify(b("0102030a0b0c")))
+ self.assertEqual(hexlify(x), b("a106") + b("0102030a0b0c"))
+
+
+class Util(unittest.TestCase):
+ def test_trytryagain(self):
+ tta = util.randrange_from_seed__trytryagain
+ for i in range(1000):
+ seed = "seed-%d" % i
+ for order in (
+ 2 ** 8 - 2,
+ 2 ** 8 - 1,
+ 2 ** 8,
+ 2 ** 8 + 1,
+ 2 ** 8 + 2,
+ 2 ** 16 - 1,
+ 2 ** 16 + 1,
+ ):
+ n = tta(seed, order)
+ self.assertTrue(1 <= n < order, (1, n, order))
+ # this trytryagain *does* provide long-term stability
+ self.assertEqual(
+ ("%x" % (tta("seed", NIST224p.order))).encode(),
+ b("6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc"),
+ )
+
+ @given(st.integers(min_value=0, max_value=10 ** 200))
+ def test_randrange(self, i):
+ # util.randrange does not provide long-term stability: we might
+ # change the algorithm in the future.
+ entropy = util.PRNG("seed-%d" % i)
+ for order in (
+ 2 ** 8 - 2,
+ 2 ** 8 - 1,
+ 2 ** 8,
+ 2 ** 16 - 1,
+ 2 ** 16 + 1,
+ ):
+ # that oddball 2**16+1 takes half our runtime
+ n = util.randrange(order, entropy=entropy)
+ self.assertTrue(1 <= n < order, (1, n, order))
+
+ def OFF_test_prove_uniformity(self): # pragma: no cover
+ order = 2 ** 8 - 2
+ counts = dict([(i, 0) for i in range(1, order)])
+ assert 0 not in counts
+ assert order not in counts
+ for i in range(1000000):
+ seed = "seed-%d" % i
+ n = util.randrange_from_seed__trytryagain(seed, order)
+ counts[n] += 1
+ # this technique should use the full range
+ self.assertTrue(counts[order - 1])
+ for i in range(1, order):
+ print_("%3d: %s" % (i, "*" * (counts[i] // 100)))
+
+
+class RFC6979(unittest.TestCase):
+ # https://tools.ietf.org/html/rfc6979#appendix-A.1
+ def _do(self, generator, secexp, hsh, hash_func, expected):
+ actual = rfc6979.generate_k(generator.order(), secexp, hash_func, hsh)
+ self.assertEqual(expected, actual)
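+
+    # A determinism sketch (illustrative, not an upstream test; pytest skips
+    # it because the name lacks the "test" prefix): generate_k() depends only
+    # on (order, secexp, hash_func, digest), so repeated calls with the same
+    # inputs must return the same k.
+    def _generate_k_determinism_sketch(self):
+        digest = sha256(b("sample")).digest()
+        order = NIST192p.generator.order()
+        k1 = rfc6979.generate_k(order, 1, sha256, digest)
+        k2 = rfc6979.generate_k(order, 1, sha256, digest)
+        assert k1 == k2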
+
+ def test_SECP256k1(self):
+ """RFC doesn't contain test vectors for SECP256k1 used in bitcoin.
+ This vector has been computed by Golang reference implementation instead."""
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=int("9d0219792467d7d37b4d43298a7d0c05", 16),
+ hsh=sha256(b("sample")).digest(),
+ hash_func=sha256,
+ expected=int(
+ "8fa1f95d514760e498f28957b824ee6ec39ed64826ff4fecc2b5739ec45b91cd",
+ 16,
+ ),
+ )
+
+ def test_SECP256k1_2(self):
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=int(
+ "cca9fbcc1b41e5a95d369eaa6ddcff73b61a4efaa279cfc6567e8daa39cbaf50",
+ 16,
+ ),
+ hsh=sha256(b("sample")).digest(),
+ hash_func=sha256,
+ expected=int(
+ "2df40ca70e639d89528a6b670d9d48d9165fdc0febc0974056bdce192b8e16a3",
+ 16,
+ ),
+ )
+
+ def test_SECP256k1_3(self):
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=0x1,
+ hsh=sha256(b("Satoshi Nakamoto")).digest(),
+ hash_func=sha256,
+ expected=0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15,
+ )
+
+ def test_SECP256k1_4(self):
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=0x1,
+ hsh=sha256(
+ b(
+ "All those moments will be lost in time, like tears in rain. Time to die..."
+ )
+ ).digest(),
+ hash_func=sha256,
+ expected=0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3,
+ )
+
+ def test_SECP256k1_5(self):
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140,
+ hsh=sha256(b("Satoshi Nakamoto")).digest(),
+ hash_func=sha256,
+ expected=0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90,
+ )
+
+ def test_SECP256k1_6(self):
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=0xF8B8AF8CE3C7CCA5E300D33939540C10D45CE001B8F252BFBC57BA0342904181,
+ hsh=sha256(b("Alan Turing")).digest(),
+ hash_func=sha256,
+ expected=0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1,
+ )
+
+ def test_1(self):
+        # Basic example from the RFC; it also exercises the 'try-try-again'
+        # loop from Step H of RFC 6979.
+ self._do(
+ generator=Point(
+ None,
+ 0,
+ 0,
+ int("4000000000000000000020108A2E0CC0D99F8A5EF", 16),
+ ),
+ secexp=int("09A4D6792295A7F730FC3F2B49CBC0F62E862272F", 16),
+ hsh=unhexlify(
+ b(
+ "AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF"
+ )
+ ),
+ hash_func=sha256,
+ expected=int("23AF4074C90A02B3FE61D286D5C87F425E6BDD81B", 16),
+ )
+
+ def test_2(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha1(b("sample")).digest(),
+ hash_func=sha1,
+ expected=int(
+ "37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", 16
+ ),
+ )
+
+ def test_3(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha256(b("sample")).digest(),
+ hash_func=sha256,
+ expected=int(
+ "32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16
+ ),
+ )
+
+ def test_4(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha512(b("sample")).digest(),
+ hash_func=sha512,
+ expected=int(
+ "A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", 16
+ ),
+ )
+
+ def test_5(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha1(b("test")).digest(),
+ hash_func=sha1,
+ expected=int(
+ "D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", 16
+ ),
+ )
+
+ def test_6(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha256(b("test")).digest(),
+ hash_func=sha256,
+ expected=int(
+ "5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", 16
+ ),
+ )
+
+ def test_7(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha512(b("test")).digest(),
+ hash_func=sha512,
+ expected=int(
+ "0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", 16
+ ),
+ )
+
+ def test_8(self):
+ self._do(
+ generator=NIST521p.generator,
+ secexp=int(
+ "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538",
+ 16,
+ ),
+ hsh=sha1(b("sample")).digest(),
+ hash_func=sha1,
+ expected=int(
+ "089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9",
+ 16,
+ ),
+ )
+
+ def test_9(self):
+ self._do(
+ generator=NIST521p.generator,
+ secexp=int(
+ "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538",
+ 16,
+ ),
+ hsh=sha256(b("sample")).digest(),
+ hash_func=sha256,
+ expected=int(
+ "0EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0",
+ 16,
+ ),
+ )
+
+ def test_10(self):
+ self._do(
+ generator=NIST521p.generator,
+ secexp=int(
+ "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538",
+ 16,
+ ),
+ hsh=sha512(b("test")).digest(),
+ hash_func=sha512,
+ expected=int(
+ "16200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D",
+ 16,
+ ),
+ )
+
+
+class ECDH(unittest.TestCase):
+ def _do(self, curve, generator, dA, x_qA, y_qA, dB, x_qB, y_qB, x_Z, y_Z):
+ qA = dA * generator
+ qB = dB * generator
+ Z = dA * qB
+ self.assertEqual(Point(curve, x_qA, y_qA), qA)
+ self.assertEqual(Point(curve, x_qB, y_qB), qB)
+ self.assertTrue(
+ (dA * qB)
+ == (dA * dB * generator)
+ == (dB * dA * generator)
+ == (dB * qA)
+ )
+ self.assertEqual(Point(curve, x_Z, y_Z), Z)
+
+
+class RFC6932(ECDH):
+ # https://tools.ietf.org/html/rfc6932#appendix-A.1
+
+ def test_brainpoolP224r1(self):
+ self._do(
+ curve=curve_brainpoolp224r1,
+ generator=BRAINPOOLP224r1.generator,
+ dA=int(
+ "7C4B7A2C8A4BAD1FBB7D79CC0955DB7C6A4660CA64CC4778159B495E", 16
+ ),
+ x_qA=int(
+ "B104A67A6F6E85E14EC1825E1539E8ECDBBF584922367DD88C6BDCF2", 16
+ ),
+ y_qA=int(
+ "46D782E7FDB5F60CD8404301AC5949C58EDB26BC68BA07695B750A94", 16
+ ),
+ dB=int(
+ "63976D4AAE6CD0F6DD18DEFEF55D96569D0507C03E74D6486FFA28FB", 16
+ ),
+ x_qB=int(
+ "2A97089A9296147B71B21A4B574E1278245B536F14D8C2B9D07A874E", 16
+ ),
+ y_qB=int(
+ "9B900D7C77A709A797276B8CA1BA61BB95B546FC29F862E44D59D25B", 16
+ ),
+ x_Z=int(
+ "312DFD98783F9FB77B9704945A73BEB6DCCBE3B65D0F967DCAB574EB", 16
+ ),
+ y_Z=int(
+ "6F800811D64114B1C48C621AB3357CF93F496E4238696A2A012B3C98", 16
+ ),
+ )
+
+ def test_brainpoolP256r1(self):
+ self._do(
+ curve=curve_brainpoolp256r1,
+ generator=BRAINPOOLP256r1.generator,
+ dA=int(
+ "041EB8B1E2BC681BCE8E39963B2E9FC415B05283313DD1A8BCC055F11AE"
+ "49699",
+ 16,
+ ),
+ x_qA=int(
+ "78028496B5ECAAB3C8B6C12E45DB1E02C9E4D26B4113BC4F015F60C5C"
+ "CC0D206",
+ 16,
+ ),
+ y_qA=int(
+ "A2AE1762A3831C1D20F03F8D1E3C0C39AFE6F09B4D44BBE80CD100987"
+ "B05F92B",
+ 16,
+ ),
+ dB=int(
+ "06F5240EACDB9837BC96D48274C8AA834B6C87BA9CC3EEDD81F99A16B8D"
+ "804D3",
+ 16,
+ ),
+ x_qB=int(
+ "8E07E219BA588916C5B06AA30A2F464C2F2ACFC1610A3BE2FB240B635"
+ "341F0DB",
+ 16,
+ ),
+ y_qB=int(
+ "148EA1D7D1E7E54B9555B6C9AC90629C18B63BEE5D7AA6949EBBF47B2"
+ "4FDE40D",
+ 16,
+ ),
+ x_Z=int(
+ "05E940915549E9F6A4A75693716E37466ABA79B4BF2919877A16DD2CC2"
+ "E23708",
+ 16,
+ ),
+ y_Z=int(
+ "6BC23B6702BC5A019438CEEA107DAAD8B94232FFBBC350F3B137628FE6"
+ "FD134C",
+ 16,
+ ),
+ )
+
+ def test_brainpoolP384r1(self):
+ self._do(
+ curve=curve_brainpoolp384r1,
+ generator=BRAINPOOLP384r1.generator,
+ dA=int(
+ "014EC0755B78594BA47FB0A56F6173045B4331E74BA1A6F47322E70D79D"
+ "828D97E095884CA72B73FDABD5910DF0FA76A",
+ 16,
+ ),
+ x_qA=int(
+ "45CB26E4384DAF6FB776885307B9A38B7AD1B5C692E0C32F012533277"
+ "8F3B8D3F50CA358099B30DEB5EE69A95C058B4E",
+ 16,
+ ),
+ y_qA=int(
+ "8173A1C54AFFA7E781D0E1E1D12C0DC2B74F4DF58E4A4E3AF7026C5D3"
+ "2DC530A2CD89C859BB4B4B768497F49AB8CC859",
+ 16,
+ ),
+ dB=int(
+ "6B461CB79BD0EA519A87D6828815D8CE7CD9B3CAA0B5A8262CBCD550A01"
+ "5C90095B976F3529957506E1224A861711D54",
+ 16,
+ ),
+ x_qB=int(
+ "01BF92A92EE4BE8DED1A911125C209B03F99E3161CFCC986DC7711383"
+ "FC30AF9CE28CA3386D59E2C8D72CE1E7B4666E8",
+ 16,
+ ),
+ y_qB=int(
+ "3289C4A3A4FEE035E39BDB885D509D224A142FF9FBCC5CFE5CCBB3026"
+ "8EE47487ED8044858D31D848F7A95C635A347AC",
+ 16,
+ ),
+ x_Z=int(
+ "04CC4FF3DCCCB07AF24E0ACC529955B36D7C807772B92FCBE48F3AFE9A"
+ "2F370A1F98D3FA73FD0C0747C632E12F1423EC",
+ 16,
+ ),
+ y_Z=int(
+ "7F465F90BD69AFB8F828A214EB9716D66ABC59F17AF7C75EE7F1DE22AB"
+ "5D05085F5A01A9382D05BF72D96698FE3FF64E",
+ 16,
+ ),
+ )
+
+ def test_brainpoolP512r1(self):
+ self._do(
+ curve=curve_brainpoolp512r1,
+ generator=BRAINPOOLP512r1.generator,
+ dA=int(
+ "636B6BE0482A6C1C41AA7AE7B245E983392DB94CECEA2660A379CFE1595"
+ "59E357581825391175FC195D28BAC0CF03A7841A383B95C262B98378287"
+ "4CCE6FE333",
+ 16,
+ ),
+ x_qA=int(
+ "0562E68B9AF7CBFD5565C6B16883B777FF11C199161ECC427A39D17EC"
+ "2166499389571D6A994977C56AD8252658BA8A1B72AE42F4FB7532151"
+ "AFC3EF0971CCDA",
+ 16,
+ ),
+ y_qA=int(
+ "A7CA2D8191E21776A89860AFBC1F582FAA308D551C1DC6133AF9F9C3C"
+ "AD59998D70079548140B90B1F311AFB378AA81F51B275B2BE6B7DEE97"
+ "8EFC7343EA642E",
+ 16,
+ ),
+ dB=int(
+ "0AF4E7F6D52EDD52907BB8DBAB3992A0BB696EC10DF11892FF205B66D38"
+ "1ECE72314E6A6EA079CEA06961DBA5AE6422EF2E9EE803A1F236FB96A17"
+ "99B86E5C8B",
+ 16,
+ ),
+ x_qB=int(
+ "5A7954E32663DFF11AE24712D87419F26B708AC2B92877D6BFEE2BFC4"
+ "3714D89BBDB6D24D807BBD3AEB7F0C325F862E8BADE4F74636B97EAAC"
+ "E739E11720D323",
+ 16,
+ ),
+ y_qB=int(
+ "96D14621A9283A1BED84DE8DD64836B2C0758B11441179DC0C54C0D49"
+ "A47C03807D171DD544B72CAAEF7B7CE01C7753E2CAD1A861ECA55A719"
+ "54EE1BA35E04BE",
+ 16,
+ ),
+ x_Z=int(
+ "1EE8321A4BBF93B9CF8921AB209850EC9B7066D1984EF08C2BB7232362"
+ "08AC8F1A483E79461A00E0D5F6921CE9D360502F85C812BEDEE23AC5B2"
+ "10E5811B191E",
+ 16,
+ ),
+ y_Z=int(
+ "2632095B7B936174B41FD2FAF369B1D18DCADEED7E410A7E251F083109"
+ "7C50D02CFED02607B6A2D5ADB4C0006008562208631875B58B54ECDA5A"
+ "4F9FE9EAABA6",
+ 16,
+ ),
+ )
+
+
+class RFC7027(ECDH):
+ # https://tools.ietf.org/html/rfc7027#appendix-A
+
+ def test_brainpoolP256r1(self):
+ self._do(
+ curve=curve_brainpoolp256r1,
+ generator=BRAINPOOLP256r1.generator,
+ dA=int(
+ "81DB1EE100150FF2EA338D708271BE38300CB54241D79950F77B0630398"
+ "04F1D",
+ 16,
+ ),
+ x_qA=int(
+ "44106E913F92BC02A1705D9953A8414DB95E1AAA49E81D9E85F929A8E"
+ "3100BE5",
+ 16,
+ ),
+ y_qA=int(
+ "8AB4846F11CACCB73CE49CBDD120F5A900A69FD32C272223F789EF10E"
+ "B089BDC",
+ 16,
+ ),
+ dB=int(
+ "55E40BC41E37E3E2AD25C3C6654511FFA8474A91A0032087593852D3E7D"
+ "76BD3",
+ 16,
+ ),
+ x_qB=int(
+ "8D2D688C6CF93E1160AD04CC4429117DC2C41825E1E9FCA0ADDD34E6F"
+ "1B39F7B",
+ 16,
+ ),
+ y_qB=int(
+ "990C57520812BE512641E47034832106BC7D3E8DD0E4C7F1136D70065"
+ "47CEC6A",
+ 16,
+ ),
+ x_Z=int(
+ "89AFC39D41D3B327814B80940B042590F96556EC91E6AE7939BCE31F3A"
+ "18BF2B",
+ 16,
+ ),
+ y_Z=int(
+ "49C27868F4ECA2179BFD7D59B1E3BF34C1DBDE61AE12931648F43E5963"
+ "2504DE",
+ 16,
+ ),
+ )
+
+ def test_brainpoolP384r1(self):
+ self._do(
+ curve=curve_brainpoolp384r1,
+ generator=BRAINPOOLP384r1.generator,
+ dA=int(
+ "1E20F5E048A5886F1F157C74E91BDE2B98C8B52D58E5003D57053FC4B0B"
+ "D65D6F15EB5D1EE1610DF870795143627D042",
+ 16,
+ ),
+ x_qA=int(
+ "68B665DD91C195800650CDD363C625F4E742E8134667B767B1B476793"
+ "588F885AB698C852D4A6E77A252D6380FCAF068",
+ 16,
+ ),
+ y_qA=int(
+ "55BC91A39C9EC01DEE36017B7D673A931236D2F1F5C83942D049E3FA2"
+ "0607493E0D038FF2FD30C2AB67D15C85F7FAA59",
+ 16,
+ ),
+ dB=int(
+ "032640BC6003C59260F7250C3DB58CE647F98E1260ACCE4ACDA3DD869F7"
+ "4E01F8BA5E0324309DB6A9831497ABAC96670",
+ 16,
+ ),
+ x_qB=int(
+ "4D44326F269A597A5B58BBA565DA5556ED7FD9A8A9EB76C25F46DB69D"
+ "19DC8CE6AD18E404B15738B2086DF37E71D1EB4",
+ 16,
+ ),
+ y_qB=int(
+ "62D692136DE56CBE93BF5FA3188EF58BC8A3A0EC6C1E151A21038A42E"
+ "9185329B5B275903D192F8D4E1F32FE9CC78C48",
+ 16,
+ ),
+ x_Z=int(
+ "0BD9D3A7EA0B3D519D09D8E48D0785FB744A6B355E6304BC51C229FBBC"
+ "E239BBADF6403715C35D4FB2A5444F575D4F42",
+ 16,
+ ),
+ y_Z=int(
+ "0DF213417EBE4D8E40A5F76F66C56470C489A3478D146DECF6DF0D94BA"
+ "E9E598157290F8756066975F1DB34B2324B7BD",
+ 16,
+ ),
+ )
+
+ def test_brainpoolP512r1(self):
+ self._do(
+ curve=curve_brainpoolp512r1,
+ generator=BRAINPOOLP512r1.generator,
+ dA=int(
+ "16302FF0DBBB5A8D733DAB7141C1B45ACBC8715939677F6A56850A38BD8"
+ "7BD59B09E80279609FF333EB9D4C061231FB26F92EEB04982A5F1D1764C"
+ "AD57665422",
+ 16,
+ ),
+ x_qA=int(
+ "0A420517E406AAC0ACDCE90FCD71487718D3B953EFD7FBEC5F7F27E28"
+ "C6149999397E91E029E06457DB2D3E640668B392C2A7E737A7F0BF044"
+ "36D11640FD09FD",
+ 16,
+ ),
+ y_qA=int(
+ "72E6882E8DB28AAD36237CD25D580DB23783961C8DC52DFA2EC138AD4"
+ "72A0FCEF3887CF62B623B2A87DE5C588301EA3E5FC269B373B60724F5"
+ "E82A6AD147FDE7",
+ 16,
+ ),
+ dB=int(
+ "230E18E1BCC88A362FA54E4EA3902009292F7F8033624FD471B5D8ACE49"
+ "D12CFABBC19963DAB8E2F1EBA00BFFB29E4D72D13F2224562F405CB8050"
+ "3666B25429",
+ 16,
+ ),
+ x_qB=int(
+ "9D45F66DE5D67E2E6DB6E93A59CE0BB48106097FF78A081DE781CDB31"
+ "FCE8CCBAAEA8DD4320C4119F1E9CD437A2EAB3731FA9668AB268D871D"
+ "EDA55A5473199F",
+ 16,
+ ),
+ y_qB=int(
+ "2FDC313095BCDD5FB3A91636F07A959C8E86B5636A1E930E8396049CB"
+ "481961D365CC11453A06C719835475B12CB52FC3C383BCE35E27EF194"
+ "512B71876285FA",
+ 16,
+ ),
+ x_Z=int(
+ "A7927098655F1F9976FA50A9D566865DC530331846381C87256BAF3226"
+ "244B76D36403C024D7BBF0AA0803EAFF405D3D24F11A9B5C0BEF679FE1"
+ "454B21C4CD1F",
+ 16,
+ ),
+ y_Z=int(
+ "7DB71C3DEF63212841C463E881BDCF055523BD368240E6C3143BD8DEF8"
+ "B3B3223B95E0F53082FF5E412F4222537A43DF1C6D25729DDB51620A83"
+ "2BE6A26680A2",
+ 16,
+ ),
+ )
+
+
+# https://tools.ietf.org/html/rfc4754#page-5
+ "w, gwx, gwy, k, msg, md, r, s, curve",
+ [
+ pytest.param(
+ "DC51D3866A15BACDE33D96F992FCA99DA7E6EF0934E7097559C27F1614C88A7F",
+ "2442A5CC0ECD015FA3CA31DC8E2BBC70BF42D60CBCA20085E0822CB04235E970",
+ "6FC98BD7E50211A4A27102FA3549DF79EBCB4BF246B80945CDDFE7D509BBFD7D",
+ "9E56F509196784D963D1C0A401510EE7ADA3DCC5DEE04B154BF61AF1D5A6DECE",
+ b"abc",
+ sha256,
+ "CB28E0999B9C7715FD0A80D8E47A77079716CBBF917DD72E97566EA1C066957C",
+ "86FA3BB4E26CAD5BF90B7F81899256CE7594BB1EA0C89212748BFF3B3D5B0315",
+ NIST256p,
+ id="ECDSA-256",
+ ),
+ pytest.param(
+ "0BEB646634BA87735D77AE4809A0EBEA865535DE4C1E1DCB692E84708E81A5AF"
+ "62E528C38B2A81B35309668D73524D9F",
+ "96281BF8DD5E0525CA049C048D345D3082968D10FEDF5C5ACA0C64E6465A97EA"
+ "5CE10C9DFEC21797415710721F437922",
+ "447688BA94708EB6E2E4D59F6AB6D7EDFF9301D249FE49C33096655F5D502FAD"
+ "3D383B91C5E7EDAA2B714CC99D5743CA",
+ "B4B74E44D71A13D568003D7489908D564C7761E229C58CBFA18950096EB7463B"
+ "854D7FA992F934D927376285E63414FA",
+ b"abc",
+ sha384,
+ "FB017B914E29149432D8BAC29A514640B46F53DDAB2C69948084E2930F1C8F7E"
+ "08E07C9C63F2D21A07DCB56A6AF56EB3",
+ "B263A1305E057F984D38726A1B46874109F417BCA112674C528262A40A629AF1"
+ "CBB9F516CE0FA7D2FF630863A00E8B9F",
+ NIST384p,
+ id="ECDSA-384",
+ ),
+ pytest.param(
+ "0065FDA3409451DCAB0A0EAD45495112A3D813C17BFD34BDF8C1209D7DF58491"
+ "20597779060A7FF9D704ADF78B570FFAD6F062E95C7E0C5D5481C5B153B48B37"
+ "5FA1",
+ "0151518F1AF0F563517EDD5485190DF95A4BF57B5CBA4CF2A9A3F6474725A35F"
+ "7AFE0A6DDEB8BEDBCD6A197E592D40188901CECD650699C9B5E456AEA5ADD190"
+ "52A8",
+ "006F3B142EA1BFFF7E2837AD44C9E4FF6D2D34C73184BBAD90026DD5E6E85317"
+ "D9DF45CAD7803C6C20035B2F3FF63AFF4E1BA64D1C077577DA3F4286C58F0AEA"
+ "E643",
+ "00C1C2B305419F5A41344D7E4359933D734096F556197A9B244342B8B62F46F9"
+ "373778F9DE6B6497B1EF825FF24F42F9B4A4BD7382CFC3378A540B1B7F0C1B95"
+ "6C2F",
+ b"abc",
+ sha512,
+ "0154FD3836AF92D0DCA57DD5341D3053988534FDE8318FC6AAAAB68E2E6F4339"
+ "B19F2F281A7E0B22C269D93CF8794A9278880ED7DBB8D9362CAEACEE54432055"
+ "2251",
+ "017705A7030290D1CEB605A9A1BB03FF9CDD521E87A696EC926C8C10C8362DF4"
+ "975367101F67D1CF9BCCBF2F3D239534FA509E70AAC851AE01AAC68D62F86647"
+ "2660",
+ NIST521p,
+ id="ECDSA-521",
+ ),
+ ],
+)
+def test_RFC4754_vectors(w, gwx, gwy, k, msg, md, r, s, curve):
+ sk = SigningKey.from_string(unhexlify(w), curve)
+ vk = VerifyingKey.from_string(unhexlify(gwx + gwy), curve)
+ assert sk.verifying_key == vk
+ sig = sk.sign(msg, hashfunc=md, sigencode=sigencode_strings, k=int(k, 16))
+
+ assert sig == (unhexlify(r), unhexlify(s))
+
+ assert vk.verify(sig, msg, md, sigdecode_strings)
diff --git a/frozen_deps/ecdsa/test_rw_lock.py b/frozen_deps/ecdsa/test_rw_lock.py
new file mode 100644
index 0000000..d360482
--- /dev/null
+++ b/frozen_deps/ecdsa/test_rw_lock.py
@@ -0,0 +1,177 @@
+# Copyright Mateusz Kobos, (c) 2011
+# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/
+# released under the MIT licence
+
+import unittest
+import threading
+import time
+import copy
+from ._rwlock import RWLock
+
+
+class Writer(threading.Thread):
+ def __init__(
+ self, buffer_, rw_lock, init_sleep_time, sleep_time, to_write
+ ):
+ """
+ @param buffer_: common buffer_ shared by the readers and writers
+ @type buffer_: list
+ @type rw_lock: L{RWLock}
+ @param init_sleep_time: sleep time before doing any action
+ @type init_sleep_time: C{float}
+ @param sleep_time: sleep time while in critical section
+ @type sleep_time: C{float}
+ @param to_write: data that will be appended to the buffer
+ """
+ threading.Thread.__init__(self)
+ self.__buffer = buffer_
+ self.__rw_lock = rw_lock
+ self.__init_sleep_time = init_sleep_time
+ self.__sleep_time = sleep_time
+ self.__to_write = to_write
+ self.entry_time = None
+ """Time of entry to the critical section"""
+ self.exit_time = None
+ """Time of exit from the critical section"""
+
+ def run(self):
+ time.sleep(self.__init_sleep_time)
+ self.__rw_lock.writer_acquire()
+ self.entry_time = time.time()
+ time.sleep(self.__sleep_time)
+ self.__buffer.append(self.__to_write)
+ self.exit_time = time.time()
+ self.__rw_lock.writer_release()
+
+
+class Reader(threading.Thread):
+ def __init__(self, buffer_, rw_lock, init_sleep_time, sleep_time):
+ """
+ @param buffer_: common buffer shared by the readers and writers
+ @type buffer_: list
+ @type rw_lock: L{RWLock}
+ @param init_sleep_time: sleep time before doing any action
+ @type init_sleep_time: C{float}
+ @param sleep_time: sleep time while in critical section
+ @type sleep_time: C{float}
+ """
+ threading.Thread.__init__(self)
+ self.__buffer = buffer_
+ self.__rw_lock = rw_lock
+ self.__init_sleep_time = init_sleep_time
+ self.__sleep_time = sleep_time
+ self.buffer_read = None
+ """a copy of a the buffer read while in critical section"""
+ self.entry_time = None
+ """Time of entry to the critical section"""
+ self.exit_time = None
+ """Time of exit from the critical section"""
+
+ def run(self):
+ time.sleep(self.__init_sleep_time)
+ self.__rw_lock.reader_acquire()
+ self.entry_time = time.time()
+ time.sleep(self.__sleep_time)
+ self.buffer_read = copy.deepcopy(self.__buffer)
+ self.exit_time = time.time()
+ self.__rw_lock.reader_release()
+
+
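+# The test cases below start Reader and Writer threads with staggered start
+# times, then compare the recorded entry/exit timestamps: readers should
+# overlap in the critical section, writers should hold it exclusively, and a
+# waiting writer should be admitted ahead of readers that arrive after it.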
+class RWLockTestCase(unittest.TestCase):
+ def test_readers_nonexclusive_access(self):
+ (buffer_, rw_lock, threads) = self.__init_variables()
+
+ threads.append(Reader(buffer_, rw_lock, 0, 0))
+ threads.append(Writer(buffer_, rw_lock, 0.2, 0.4, 1))
+ threads.append(Reader(buffer_, rw_lock, 0.3, 0.3))
+ threads.append(Reader(buffer_, rw_lock, 0.5, 0))
+
+ self.__start_and_join_threads(threads)
+
+ ## The third reader should enter after the second one but it should
+ ## exit before the second one exits
+ ## (i.e. the readers should be in the critical section
+ ## at the same time)
+
+ self.assertEqual([], threads[0].buffer_read)
+ self.assertEqual([1], threads[2].buffer_read)
+ self.assertEqual([1], threads[3].buffer_read)
+ self.assertTrue(threads[1].exit_time <= threads[2].entry_time)
+ self.assertTrue(threads[2].entry_time <= threads[3].entry_time)
+ self.assertTrue(threads[3].exit_time < threads[2].exit_time)
+
+ def test_writers_exclusive_access(self):
+ (buffer_, rw_lock, threads) = self.__init_variables()
+
+ threads.append(Writer(buffer_, rw_lock, 0, 0.4, 1))
+ threads.append(Writer(buffer_, rw_lock, 0.1, 0, 2))
+ threads.append(Reader(buffer_, rw_lock, 0.2, 0))
+
+ self.__start_and_join_threads(threads)
+
+ ## The second writer should wait for the first one to exit
+
+ self.assertEqual([1, 2], threads[2].buffer_read)
+ self.assertTrue(threads[0].exit_time <= threads[1].entry_time)
+ self.assertTrue(threads[1].exit_time <= threads[2].exit_time)
+
+ def test_writer_priority(self):
+ (buffer_, rw_lock, threads) = self.__init_variables()
+
+ threads.append(Writer(buffer_, rw_lock, 0, 0, 1))
+ threads.append(Reader(buffer_, rw_lock, 0.1, 0.4))
+ threads.append(Writer(buffer_, rw_lock, 0.2, 0, 2))
+ threads.append(Reader(buffer_, rw_lock, 0.3, 0))
+ threads.append(Reader(buffer_, rw_lock, 0.3, 0))
+
+ self.__start_and_join_threads(threads)
+
+ ## The second writer should go before the second and the third reader
+
+ self.assertEqual([1], threads[1].buffer_read)
+ self.assertEqual([1, 2], threads[3].buffer_read)
+ self.assertEqual([1, 2], threads[4].buffer_read)
+ self.assertTrue(threads[0].exit_time < threads[1].entry_time)
+ self.assertTrue(threads[1].exit_time <= threads[2].entry_time)
+ self.assertTrue(threads[2].exit_time <= threads[3].entry_time)
+ self.assertTrue(threads[2].exit_time <= threads[4].entry_time)
+
+ def test_many_writers_priority(self):
+ (buffer_, rw_lock, threads) = self.__init_variables()
+
+ threads.append(Writer(buffer_, rw_lock, 0, 0, 1))
+ threads.append(Reader(buffer_, rw_lock, 0.1, 0.6))
+ threads.append(Writer(buffer_, rw_lock, 0.2, 0.1, 2))
+ threads.append(Reader(buffer_, rw_lock, 0.3, 0))
+ threads.append(Reader(buffer_, rw_lock, 0.4, 0))
+ threads.append(Writer(buffer_, rw_lock, 0.5, 0.1, 3))
+
+ self.__start_and_join_threads(threads)
+
+ ## The last two writers should go first -- after the first reader and
+ ## before the second and the third readers
+
+ self.assertEqual([1], threads[1].buffer_read)
+ self.assertEqual([1, 2, 3], threads[3].buffer_read)
+ self.assertEqual([1, 2, 3], threads[4].buffer_read)
+ self.assertTrue(threads[0].exit_time < threads[1].entry_time)
+ self.assertTrue(threads[1].exit_time <= threads[2].entry_time)
+ self.assertTrue(threads[1].exit_time <= threads[5].entry_time)
+ self.assertTrue(threads[2].exit_time <= threads[3].entry_time)
+ self.assertTrue(threads[2].exit_time <= threads[4].entry_time)
+ self.assertTrue(threads[5].exit_time <= threads[3].entry_time)
+ self.assertTrue(threads[5].exit_time <= threads[4].entry_time)
+
+ @staticmethod
+ def __init_variables():
+ buffer_ = []
+ rw_lock = RWLock()
+ threads = []
+ return (buffer_, rw_lock, threads)
+
+ @staticmethod
+ def __start_and_join_threads(threads):
+ for t in threads:
+ t.start()
+ for t in threads:
+ t.join()
diff --git a/frozen_deps/ecdsa/util.py b/frozen_deps/ecdsa/util.py
new file mode 100644
index 0000000..e77d61c
--- /dev/null
+++ b/frozen_deps/ecdsa/util.py
@@ -0,0 +1,435 @@
+from __future__ import division
+
+import os
+import math
+import binascii
+import sys
+from hashlib import sha256
+from six import PY2, int2byte, b, next
+from . import der
+from ._compat import normalise_bytes
+
+# RFC5480:
+# The "unrestricted" algorithm identifier is:
+# id-ecPublicKey OBJECT IDENTIFIER ::= {
+# iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 }
+
+oid_ecPublicKey = (1, 2, 840, 10045, 2, 1)
+encoded_oid_ecPublicKey = der.encode_oid(*oid_ecPublicKey)
+
+# RFC5480:
+# The ECDH algorithm uses the following object identifier:
+# id-ecDH OBJECT IDENTIFIER ::= {
+# iso(1) identified-organization(3) certicom(132) schemes(1)
+# ecdh(12) }
+
+oid_ecDH = (1, 3, 132, 1, 12)
+
+# RFC5480:
+# The ECMQV algorithm uses the following object identifier:
+# id-ecMQV OBJECT IDENTIFIER ::= {
+# iso(1) identified-organization(3) certicom(132) schemes(1)
+# ecmqv(13) }
+
+oid_ecMQV = (1, 3, 132, 1, 13)
+
+if sys.version_info >= (3,):
+
+ def entropy_to_bits(ent_256):
+ """Convert a bytestring to string of 0's and 1's"""
+ return bin(int.from_bytes(ent_256, "big"))[2:].zfill(len(ent_256) * 8)
+
+
+else:
+
+ def entropy_to_bits(ent_256):
+ """Convert a bytestring to string of 0's and 1's"""
+ return "".join(bin(ord(x))[2:].zfill(8) for x in ent_256)
+
+
+if sys.version_info < (2, 7):
+ # Can't add a method to a built-in type so we are stuck with this
+ def bit_length(x):
+ return len(bin(x)) - 2
+
+
+else:
+
+ def bit_length(x):
+ return x.bit_length() or 1
+
+
+def orderlen(order):
+ return (1 + len("%x" % order)) // 2 # bytes
+
+
+def randrange(order, entropy=None):
+ """Return a random integer k such that 1 <= k < order, uniformly
+ distributed across that range. In the worst case (an order just above a
+ power of two, e.g. (2**k)+2) this averages about two loop iterations.
+
+ Note that this function is not declared to be forwards-compatible: we may
+ change the behavior in future releases. The entropy= argument (which
+ should get a callable that behaves like os.urandom) can be used to
+ achieve stability within a given release (for repeatable unit tests), but
+ should not be used as a long-term-compatible key generation algorithm.
+ """
+ assert order > 1
+ if entropy is None:
+ entropy = os.urandom
+ upper_2 = bit_length(order - 2)
+ upper_256 = upper_2 // 8 + 1
+ while True: # I don't think this needs a counter with bit-wise randrange
+ ent_256 = entropy(upper_256)
+ ent_2 = entropy_to_bits(ent_256)
+ rand_num = int(ent_2[:upper_2], base=2) + 1
+ if 0 < rand_num < order:
+ return rand_num
+
+
+class PRNG:
+ # this returns a callable which, when invoked with an integer N, will
+ # return N pseudorandom bytes. Note: this is a short-term PRNG, meant
+ # primarily for the needs of randrange_from_seed__trytryagain(), which
+ # only needs to run it a few times per seed. It does not provide
+ # protection against state compromise (forward security).
+ def __init__(self, seed):
+ self.generator = self.block_generator(seed)
+
+ def __call__(self, numbytes):
+ a = [next(self.generator) for i in range(numbytes)]
+
+ if PY2:
+ return "".join(a)
+ else:
+ return bytes(a)
+
+ def block_generator(self, seed):
+ counter = 0
+ while True:
+ for byte in sha256(
+ ("prng-%d-%s" % (counter, seed)).encode()
+ ).digest():
+ yield byte
+ counter += 1
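+
+
+# A minimal usage sketch of the PRNG helper above (not exercised by the
+# library itself): the same seed always yields the same byte stream, which is
+# exactly what the entropy= hook of randrange() needs for repeatable tests.
+# The seed value is an arbitrary example.
+def _example_prng_determinism():
+ assert PRNG(b"example-seed")(8) == PRNG(b"example-seed")(8)
+ assert len(PRNG(b"example-seed")(32)) == 32
+ return randrange(2 ** 192, entropy=PRNG(b"example-seed"))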
+
+
+def randrange_from_seed__overshoot_modulo(seed, order):
+ # hash the data, then turn the digest into a number in [1,order).
+ #
+ # We use David-Sarah Hopwood's suggestion: turn it into a number that's
+ # sufficiently larger than the group order, then modulo it down to fit.
+ # This should give adequate (but not perfect) uniformity, and simple
+ # code. There are other choices: try-try-again is the main one.
+ base = PRNG(seed)(2 * orderlen(order))
+ number = (int(binascii.hexlify(base), 16) % (order - 1)) + 1
+ assert 1 <= number < order, (1, number, order)
+ return number
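+
+
+# A short sketch of the behaviour described above (illustrative only): the
+# derivation is deterministic in the seed and always lands in [1, order).
+# The seed and order values are arbitrary examples.
+def _example_overshoot_modulo():
+ order = 2 ** 192 - 2 ** 64 - 1
+ first = randrange_from_seed__overshoot_modulo(b"example-seed", order)
+ second = randrange_from_seed__overshoot_modulo(b"example-seed", order)
+ assert first == second and 1 <= first < order
+ return first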
+
+
+def lsb_of_ones(numbits):
+ return (1 << numbits) - 1
+
+
+def bits_and_bytes(order):
+ bits = int(math.log(order - 1, 2) + 1)
+ bytes = bits // 8
+ extrabits = bits % 8
+ return bits, bytes, extrabits
+
+
+# the following randrange_from_seed__METHOD() functions take an
+# arbitrarily-sized secret seed and turn it into a number that obeys the same
+# range limits as randrange() above. They are meant for deriving consistent
+# signing keys from a secret rather than generating them randomly, for
+# example a protocol in which three signing keys are derived from a master
+# secret. You should use a uniformly-distributed unguessable seed with about
+# curve.baselen bytes of entropy. To use one, do this:
+# seed = os.urandom(curve.baselen) # or other starting point
+# secexp = ecdsa.util.randrange_from_seed__trytryagain(seed, curve.order)
+# sk = SigningKey.from_secret_exponent(secexp, curve)
+
+
+def randrange_from_seed__truncate_bytes(seed, order, hashmod=sha256):
+ # hash the seed, then turn the digest into a number in [1,order), but
+ # don't worry about trying to uniformly fill the range. This will lose,
+ # on average, four bits of entropy.
+ bits, _bytes, extrabits = bits_and_bytes(order)
+ if extrabits:
+ _bytes += 1
+ base = hashmod(seed).digest()[:_bytes]
+ base = "\x00" * (_bytes - len(base)) + base
+ number = 1 + int(binascii.hexlify(base), 16)
+ assert 1 <= number < order
+ return number
+
+
+def randrange_from_seed__truncate_bits(seed, order, hashmod=sha256):
+ # like randrange_from_seed__truncate_bytes above, but only loses an
+ # average of half a bit of entropy
+ bits = int(math.log(order - 1, 2) + 1)
+ maxbytes = (bits + 7) // 8
+ base = hashmod(seed).digest()[:maxbytes]
+ base = "\x00" * (maxbytes - len(base)) + base
+ topbits = 8 * maxbytes - bits
+ if topbits:
+ base = int2byte(ord(base[0]) & lsb_of_ones(topbits)) + base[1:]
+ number = 1 + int(binascii.hexlify(base), 16)
+ assert 1 <= number < order
+ return number
+
+
+def randrange_from_seed__trytryagain(seed, order):
+ # figure out exactly how many bits we need (rounded up to the nearest
+ # bit), so we can reduce the chance of looping to less than 0.5 . This is
+ # specified to feed from a byte-oriented PRNG, and discards the
+ # high-order bits of the first byte as necessary to get the right number
+ # of bits. The average number of loops will range from 1.0 (when
+ # order=2**k-1) to 2.0 (when order=2**k+1).
+ assert order > 1
+ bits, bytes, extrabits = bits_and_bytes(order)
+ generate = PRNG(seed)
+ while True:
+ extrabyte = b("")
+ if extrabits:
+ extrabyte = int2byte(ord(generate(1)) & lsb_of_ones(extrabits))
+ guess = string_to_number(extrabyte + generate(bytes)) + 1
+ if 1 <= guess < order:
+ return guess
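+
+
+# A minimal sketch of the recipe in the comment block above, assuming the
+# placeholder seed is replaced with a real, unguessable value of about
+# curve.baselen bytes.  The imports are done lazily to avoid a circular
+# import at module load time.
+def _example_key_from_seed(seed=b"only-an-example-seed"):
+ from .curves import NIST256p
+ from .keys import SigningKey
+ secexp = randrange_from_seed__trytryagain(seed, NIST256p.order)
+ return SigningKey.from_secret_exponent(secexp, curve=NIST256p)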
+
+
+def number_to_string(num, order):
+ l = orderlen(order)
+ fmt_str = "%0" + str(2 * l) + "x"
+ string = binascii.unhexlify((fmt_str % num).encode())
+ assert len(string) == l, (len(string), l)
+ return string
+
+
+def number_to_string_crop(num, order):
+ l = orderlen(order)
+ fmt_str = "%0" + str(2 * l) + "x"
+ string = binascii.unhexlify((fmt_str % num).encode())
+ return string[:l]
+
+
+def string_to_number(string):
+ return int(binascii.hexlify(string), 16)
+
+
+def string_to_number_fixedlen(string, order):
+ l = orderlen(order)
+ assert len(string) == l, (len(string), l)
+ return int(binascii.hexlify(string), 16)
+
+
+# these methods are useful for the sigencode= argument to SK.sign() and the
+# sigdecode= argument to VK.verify(), and control how the signature is packed
+# or unpacked.
+
+
+def sigencode_strings(r, s, order):
+ r_str = number_to_string(r, order)
+ s_str = number_to_string(s, order)
+ return (r_str, s_str)
+
+
+def sigencode_string(r, s, order):
+ """
+ Encode the signature to raw format (:term:`raw encoding`)
+
+ It's expected that this function will be used as a `sigencode=` parameter
+ in :func:`ecdsa.keys.SigningKey.sign` method.
+
+ :param int r: first parameter of the signature
+ :param int s: second parameter of the signature
+ :param int order: the order of the curve over which the signature was
+ computed
+
+ :return: raw encoding of ECDSA signature
+ :rtype: bytes
+ """
+ # for any given curve, the size of the signature numbers is
+ # fixed, so just use simple concatenation
+ r_str, s_str = sigencode_strings(r, s, order)
+ return r_str + s_str
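+
+
+# A small sketch of the fixed-width property described above: with a 192-bit
+# toy order each half is padded to 24 bytes, so the raw signature is 48 bytes.
+def _example_raw_signature_length():
+ order = 2 ** 192 - 1
+ assert len(sigencode_string(1, 2, order)) == 2 * orderlen(order) == 48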
+
+
+def sigencode_der(r, s, order):
+ """
+ Encode the signature into the ECDSA-Sig-Value structure using :term:`DER`.
+
+ Encodes the signature to the following :term:`ASN.1` structure::
+
+ Ecdsa-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
+
+ It's expected that this function will be used as a `sigencode=` parameter
+ in :func:`ecdsa.keys.SigningKey.sign` method.
+
+ :param int r: first parameter of the signature
+ :param int s: second parameter of the signature
+ :param int order: the order of the curve over which the signature was
+ computed
+
+ :return: DER encoding of ECDSA signature
+ :rtype: bytes
+ """
+ return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))
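+
+
+# A worked example of the DER structure described above for the toy values
+# r=1, s=2: a SEQUENCE (0x30, length 6) holding two one-byte INTEGERs.  The
+# order argument is unused by the DER encoding and is only a placeholder.
+def _example_der_signature_bytes():
+ order = 2 ** 192 - 1
+ assert sigencode_der(1, 2, order) == b"\x30\x06\x02\x01\x01\x02\x01\x02"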
+
+
+# canonical versions of sigencode methods
+# these enforce low S values, by negating the value (modulo the order) if
+# above order/2 see CECKey::Sign()
+# https://github.com/bitcoin/bitcoin/blob/master/src/key.cpp#L214
+def sigencode_strings_canonize(r, s, order):
+ if s > order / 2:
+ s = order - s
+ return sigencode_strings(r, s, order)
+
+
+def sigencode_string_canonize(r, s, order):
+ if s > order / 2:
+ s = order - s
+ return sigencode_string(r, s, order)
+
+
+def sigencode_der_canonize(r, s, order):
+ if s > order / 2:
+ s = order - s
+ return sigencode_der(r, s, order)
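+
+
+# A self-contained sketch of the low-S rule above, using a toy order (real
+# callers pass the curve order): when s > order/2 the canonical encoders
+# substitute order - s before encoding.
+def _example_low_s_canonicalize():
+ order = 23
+ r, s = 5, 20
+ assert sigencode_strings_canonize(r, s, order) == sigencode_strings(r, order - s, order)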
+
+
+class MalformedSignature(Exception):
+ """
+ Raised by decoding functions when the signature is malformed.
+
+ Malformed in this context means that the relevant strings or integers
+ do not match what a signature over the provided curve would create. Either
+ because the byte strings have incorrect lengths or because the encoded
+ values are too large.
+ """
+
+ pass
+
+
+def sigdecode_string(signature, order):
+ """
+ Decoder for :term:`raw encoding` of ECDSA signatures.
+
+ Raw encoding is a simple concatenation of the two integers that comprise
+ the signature, with each encoded using the same number of bytes, as
+ determined by the curve size/order.
+
+ It's expected that this function will be used as the `sigdecode=`
+ parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method.
+
+ :param signature: encoded signature
+ :type signature: bytes like object
+ :param order: order of the curve over which the signature was computed
+ :type order: int
+
+ :raises MalformedSignature: when the encoding of the signature is invalid
+
+ :return: tuple with decoded 'r' and 's' values of signature
+ :rtype: tuple of ints
+ """
+ signature = normalise_bytes(signature)
+ l = orderlen(order)
+ if not len(signature) == 2 * l:
+ raise MalformedSignature(
+ "Invalid length of signature, expected {0} bytes long, "
+ "provided string is {1} bytes long".format(2 * l, len(signature))
+ )
+ r = string_to_number_fixedlen(signature[:l], order)
+ s = string_to_number_fixedlen(signature[l:], order)
+ return r, s
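+
+
+# A round-trip sketch of the raw encoding and decoding described above, with
+# toy values; the order only fixes the width of each half.
+def _example_raw_roundtrip():
+ order = 2 ** 192 - 1
+ r, s = 1234567890, 987654321
+ assert sigdecode_string(sigencode_string(r, s, order), order) == (r, s)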
+
+
+def sigdecode_strings(rs_strings, order):
+ """
+ Decode the signature from two strings.
+
+ The first string needs to be a big-endian encoding of the 'r' parameter and
+ the second a big-endian encoding of the 's' parameter of an ECDSA signature.
+
+ It's expected that this function will be used as the `sigdecode=`
+ parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method.
+
+ :param list rs_strings: list of two bytes-like objects, each encoding one
+ parameter of signature
+ :param int order: order of the curve over which the signature was computed
+
+ :raises MalformedSignature: when the encoding of the signature is invalid
+
+ :return: tuple with decoded 'r' and 's' values of signature
+ :rtype: tuple of ints
+ """
+ if not len(rs_strings) == 2:
+ raise MalformedSignature(
+ "Invalid number of strings provided: {0}, expected 2".format(
+ len(rs_strings)
+ )
+ )
+ (r_str, s_str) = rs_strings
+ r_str = normalise_bytes(r_str)
+ s_str = normalise_bytes(s_str)
+ l = orderlen(order)
+ if not len(r_str) == l:
+ raise MalformedSignature(
+ "Invalid length of first string ('r' parameter), "
+ "expected {0} bytes long, provided string is {1} "
+ "bytes long".format(l, len(r_str))
+ )
+ if not len(s_str) == l:
+ raise MalformedSignature(
+ "Invalid length of second string ('s' parameter), "
+ "expected {0} bytes long, provided string is {1} "
+ "bytes long".format(l, len(s_str))
+ )
+ r = string_to_number_fixedlen(r_str, order)
+ s = string_to_number_fixedlen(s_str, order)
+ return r, s
+
+
+def sigdecode_der(sig_der, order):
+ """
+ Decoder for DER format of ECDSA signatures.
+
+ DER format of signature is one that uses the :term:`ASN.1` :term:`DER`
+ rules to encode it as a sequence of two integers::
+
+ Ecdsa-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
+
+ It's expected that this function will be used as the `sigdecode=`
+ parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method.
+
+ :param sig_der: encoded signature
+ :type sig_der: bytes like object
+ :param order: order of the curve over which the signature was computed
+ :type order: int
+
+ :raises UnexpectedDER: when the encoding of signature is invalid
+
+ :return: tuple with decoded 'r' and 's' values of signature
+ :rtype: tuple of ints
+ """
+ sig_der = normalise_bytes(sig_der)
+ # return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))
+ rs_strings, empty = der.remove_sequence(sig_der)
+ if empty != b"":
+ raise der.UnexpectedDER(
+ "trailing junk after DER sig: %s" % binascii.hexlify(empty)
+ )
+ r, rest = der.remove_integer(rs_strings)
+ s, empty = der.remove_integer(rest)
+ if empty != b"":
+ raise der.UnexpectedDER(
+ "trailing junk after DER numbers: %s" % binascii.hexlify(empty)
+ )
+ return r, s
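+
+
+# A round-trip sketch for the DER helpers described above: encoding followed
+# by decoding returns the original pair; the order argument is a placeholder
+# since the DER form does not depend on it.
+def _example_der_roundtrip():
+ order = 2 ** 192 - 1
+ r, s = 1234567890, 987654321
+ assert sigdecode_der(sigencode_der(r, s, order), order) == (r, s)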