diff --git a/.github/workflows/lint-test-cover-docs.yml b/.github/workflows/lint-test-cover-docs.yml new file mode 100644 index 0000000..a635b11 --- /dev/null +++ b/.github/workflows/lint-test-cover-docs.yml @@ -0,0 +1,34 @@ +name: lint-test-cover-docs +on: + push +jobs: + lint_test_cover_docs: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.10'] + name: "Python ${{ matrix.python-version }}" + steps: + - uses: actions/checkout@v4 + - name: Install Python. + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + architecture: x64 + - name: Lint and test module. + run: | + pip install -U .[lint,test] + # python -m pylint tecdsa # Check against linting rules. + python -m pytest # Run tests. + - name: Publish coverage results. + run: | + pip install -U .[coveralls] + python -m coveralls --service=github # Submit to coveralls. + if: matrix.python-version == '3.11' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} + - name: Test auto-generation of documentation. + run: | + pip install -U .[docs] + cd docs && sphinx-apidoc -f -E --templatedir=_templates -o _source ../src && make html && cd .. \ No newline at end of file diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..96dd1e5 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,631 @@ +[MAIN] + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint +# in a server-like mode. +clear-cache-post-run=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. +#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +extension-pkg-whitelist= + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold under which the program will exit with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. +ignore=CVS + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. 
Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.10 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. If left empty, argument names will be checked with the set +# naming style. +argument-rgx=^(_?)[a-z][a-z0-9]*(_[a-z0-9]+)*(_?)(_?)$ + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +attr-rgx=^(_?)[a-z][a-z0-9]*(_[a-z0-9]+)*(_?)(_?)$ + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +#class-attribute-rgx= + +# Naming style matching correct class constant names. 
+class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. If left empty, class names will be checked with the set naming style. +class-rgx=^[a-zA-Z][a-z0-9]*(_[a-z0-9]+)*$ + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. If left empty, function names will be checked with the set +# naming style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. +#typealias-rgx= + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. If left empty, variable names will be checked with the set +# naming style. 
+variable-rgx=^(_?)[a-z][a-z0-9]*(_[a-z0-9]+)*(_?)(_?)$ + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + asyncSetUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=builtins.BaseException,builtins.Exception + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow explicit reexports by alias from a package __init__. +allow-reexport-from-package=no + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). 
+import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[METHOD_ARGS] + +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + +# Regular expression of note tags to take in consideration. +notes-rgx= + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). 
+evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[SIMILARITIES] + +# Comments are removed from the similarity computation +ignore-comments=yes + +# Docstrings are removed from the similarity computation +ignore-docstrings=yes + +# Imports are removed from the similarity computation +ignore-imports=yes + +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work.. +spelling-dict= + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of symbolic message names to ignore for Mixin members. 
+ignored-checks-for-mixins=no-member,
+                          not-async-context-manager,
+                          not-context-manager,
+                          attribute-defined-outside-init
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+# Regex pattern to define which classes are considered mixins.
+mixin-class-rgx=.*[Mm]ixin
+
+# List of decorators that change the signature of a decorated function.
+signature-mutators=
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of names allowed to shadow builtins
+allowed-redefined-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,
+          _cb
+
+# A regular expression matching the name of dummy variables (i.e. expected to
+# not be used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
\ No newline at end of file
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 0000000..48e0aaa
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,19 @@
+version: 2
+
+sphinx:
+  configuration: docs/conf.py
+
+formats:
+  - pdf
+
+python:
+  install:
+    - method: pip
+      path: .
+      extra_requirements:
+        - docs
+
+build:
+  os: "ubuntu-22.04"
+  tools:
+    python: "3.11"
\ No newline at end of file
diff --git a/README.md b/README.md
deleted file mode 100644
index 9979920..0000000
--- a/README.md
+++ /dev/null
@@ -1 +0,0 @@
-# tb-ecdsa
\ No newline at end of file
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..efc4e09
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,154 @@
+=======
+tecdsa
+=======
+
+Pure-Python implementation of a `threshold ECDSA signature scheme `__ based on a secure multi-party computation (MPC) `protocol for evaluating arithmetic sum-of-products expressions `__ via a non-interactive computation phase.
+
+|pypi| |readthedocs| |actions| |coveralls|
+
+.. |pypi| image:: https://badge.fury.io/py/tecdsa.svg
+   :target: https://badge.fury.io/py/tecdsa
+   :alt: PyPI version and link.
+
+.. |readthedocs| image:: https://readthedocs.org/projects/tecdsa/badge/?version=latest
+   :target: https://tecdsa.readthedocs.io/en/latest/?badge=latest
+   :alt: Read the Docs documentation status.
+
+.. |actions| image:: https://github.com/nillion-oss/tecdsa/workflows/lint-test-cover-docs/badge.svg
+   :target: https://github.com/nillion-oss/tecdsa/actions/workflows/lint-test-cover-docs.yml
+   :alt: GitHub Actions status.
+
+.. |coveralls| image:: https://coveralls.io/repos/github/nillion-oss/tecdsa/badge.svg?branch=main
+   :target: https://coveralls.io/github/nillion-oss/tecdsa?branch=main
+   :alt: Coveralls test coverage summary.
+
+Installation and Usage
+----------------------
+
+This library is available as a `package on PyPI `__:
+
+.. code-block:: bash
+
+    python -m pip install tecdsa
+
+The library can be imported in the usual way:
+
+.. code-block:: python
+
+    import tecdsa
+    from tecdsa import *
+
+Basic Example
+^^^^^^^^^^^^^
+
+This implementation includes an emulation of a network composed of computing nodes and clients. We can deploy
+a network for either DSA or ECDSA. In this example, we show the elliptic-curve version with 3 nodes and 1 client whose ID is set to 1.
+
+.. code-block:: python
+
+    >>> N = 3; C = 1; client_id = 1
+
+To kick things off, let's initialize the network using ECDSA with the P-256 curve:
+
+.. code-block:: python
+
+    >>> ecdsa_setup = ECDSASetup(curve="P-256")
+    >>> ecnet = ThresholdSignature(N, C, setup=ecdsa_setup)
+
+The first protocol distributes a key triple among the nodes:
+
+.. code-block:: python
+
+    >>> ecnet.distributed_key_generation_protocol(client_id)
+
+The signature protocol unfolds in two phases: the preprocessing phase and the signing phase.
+Let's run the preprocessing phase:
+
+.. code-block:: python
+
+    >>> ecnet.ts_prep_protocol(client_id)
+
+After defining a message, we can sign it as follows:
+
+.. code-block:: python
+
+    >>> message = "Let me tell you a great secret about Nillion."
+    >>> ecnet.ts_online_protocol(message, client_id)
+
+We run the following to print the signature owned by the client (ID=1):
+
+.. code-block:: python
+
+    >>> ecnet.print_signature(client_id)
+
+For a deeper dive, please check the ``demos`` folder.
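+
+The signing protocol already verifies the resulting signature internally, but we can also check it ourselves. The snippet below is a minimal sketch (not part of the original walkthrough) that mirrors the verification steps in ``demos/intro_demo.ipynb``, reusing the ``ecnet``, ``message``, and ``client_id`` values defined above:
+
+.. code-block:: python
+
+    >>> from tecdsa.utils import verify_ecdsa_signature
+    >>> r, s, m = ecnet.retrieve_signature(client_id)  # Signature produced above.
+    >>> Y = ecnet.clients[client_id - 1].get_open(str(client_id) + "th_client_x_pk")  # Client's public key.
+    >>> verify_ecdsa_signature(message, r, s, Y, ecnet.q, ecnet.ecdsa.G)
+
+A successful verification produces no output; if the signature does not correspond to the message, ``verify_ecdsa_signature`` raises a ``VerifySignatureError`` (as the demo notebook illustrates with a mismatched message).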
+
+Development
+-----------
+All installation and development dependencies are fully specified in ``pyproject.toml``. The ``project.optional-dependencies`` object is used to `specify optional requirements `__ for various development tasks. This makes it possible to specify additional options (such as ``docs``, ``lint``, and so on) when performing installation using `pip `__:
+
+.. code-block:: bash
+
+    python -m pip install .[docs,lint]
+
+Documentation
+^^^^^^^^^^^^^
+The documentation can be generated automatically from the source files using `Sphinx `__:
+
+.. code-block:: bash
+
+    python -m pip install .[docs]
+    cd docs
+    sphinx-apidoc -f -E --templatedir=_templates -o _source ../src && make html
+
+Testing and Conventions
+^^^^^^^^^^^^^^^^^^^^^^^
+All unit tests are executed and their coverage is measured when using `pytest `__ (see the ``pyproject.toml`` file for configuration details):
+
+.. code-block:: bash
+
+    python -m pip install .[test]
+    python -m pytest
+
+Style conventions are enforced using `Pylint `__:
+
+.. code-block:: bash
+
+    python -m pip install .[lint]
+    python -m pylint src/tecdsa
+
+Contributions
+^^^^^^^^^^^^^
+To contribute to the source code, open an issue or submit a pull request on the `GitHub page `__ for this library.
+
+Versioning
+^^^^^^^^^^
+The version number format for this library and the changes to the library associated with version number increments conform with `Semantic Versioning 2.0.0 `__.
+ +Publishing +^^^^^^^^^^ +This library can be published as a `package on PyPI `__ by a package maintainer. First, install the dependencies required for packaging and publishing: + +.. code-block:: bash + + python -m pip install .[publish] + +Ensure that the correct version number appears in ``pyproject.toml``, and that any links in this README document to the Read the Docs documentation of this package (or its dependencies) have appropriate version numbers. Also ensure that the Read the Docs project for this library has an `automation rule `__ that activates and sets as the default all tagged versions. Create and push a tag for this version (replacing ``?.?.?`` with the version number): + +.. code-block:: bash + + git tag ?.?.? + git push origin ?.?.? + +Remove any old build/distribution files. Then, package the source into a distribution archive: + +.. code-block:: bash + + rm -rf build dist src/*.egg-info + python -m build --sdist --wheel . + +Finally, upload the package distribution archive to `PyPI `__: + +.. code-block:: bash + + python -m twine upload dist/* diff --git a/demos/intro_demo.ipynb b/demos/intro_demo.ipynb new file mode 100644 index 0000000..9df20e8 --- /dev/null +++ b/demos/intro_demo.ipynb @@ -0,0 +1,766 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "e90c84a3-2e69-431c-b8e9-8e0e81e8dada", + "metadata": {}, + "outputs": [], + "source": [ + "from tecdsa.utils import verify_ecdsa_signature, verify_dsa_signature\n", + "from tecdsa.setup import DSASetup, ECDSASetup\n", + "from tecdsa.tecdsa import ThresholdSignature" + ] + }, + { + "cell_type": "markdown", + "id": "4216cd42-980d-4394-914e-2c627830da3e", + "metadata": {}, + "source": [ + "# Setup network\n", + "\n", + "Depending on the signature type we are aiming at (DSA or ECDSA), we can start the network with the class corresponding to that type. Let us see an example with 3 nodes and 1 client." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "52183e54-5528-429b-b575-4534e409fb8a", + "metadata": {}, + "outputs": [], + "source": [ + "N = 3; C = 1" + ] + }, + { + "cell_type": "markdown", + "id": "1ecf3376-ca1b-4636-b9fd-bc5a1e98e88d", + "metadata": {}, + "source": [ + "### DSA\n", + "\n", + "To start a DSA signature, we use the class DSASetup. There are two options here:\n", + "1. Generate the DSA setup from scratch; \n", + "2. Use a DSA setup from predefined variables." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "ebd53f46-0823-43ac-8678-7a536e7a2131", + "metadata": {}, + "outputs": [], + "source": [ + "# Option 1\n", + "fnil = ThresholdSignature(N, C)" + ] + }, + { + "cell_type": "markdown", + "id": "3b131b8a-ac50-4c31-a919-df9da22df805", + "metadata": {}, + "source": [ + "This option takes longer as the `generate_dsa_setup()` function has to find a primitive root for the `q` prime of the DSA setup." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "4ae4685f-14df-4ffc-94a4-a9ad84a2b9ed", + "metadata": {}, + "outputs": [], + "source": [ + "# Option 2\n", + "p = 16987220163402883416449356930313946536948708368250187300904484990592060034399925373558684845589122357155245527725130833676269318205326149268410610561367974110319706088695097181729621805806503895242356834473026604015120592348890617701675387428807728090415853765634415325555621648235338466349957683063948139664640253794461972428009207212678775162641560258829400418398089166048123240989061901894801714545511227607162820358567023001939860545346903340718565981921863209491363679301897076211852920953764568258713784702534591771430944194446014721504677270780731006777716425745362753111142293413901721847152726957943402872047 \n", + "q = 18615201011907374064080708325380633467600489307695820739772219003499; \n", + "g = 1440750739647392583923353319762863205412412735463771135600354281498545556560554285032144083567469348458038821471561505478727536048946568600306333026876282227253717145726280535747755789389298351217147981114134445522434273687560094566805116079958307881112688486903459951003823567315217837479260063662350297462681218852749673559083125704211468000331391500202590446254295826681166987302499736009857926325072657165790463352645906484288271010829609728496608136458748019477999277575560554800692468233144862681997749241093911491601564874805253852956797072221469937174346581408575685518457073416604892562933677029344283366064\n", + "h = 2\n", + "dsa_setup = DSASetup(p, q, g, h)\n", + "fnil = ThresholdSignature(N, C, setup=dsa_setup)" + ] + }, + { + "cell_type": "markdown", + "id": "de9793bd-487e-4448-9389-a917d703bf35", + "metadata": {}, + "source": [ + "**Debug mode**: the network can be set to debug mode to keep all elements sent and received throughout protocol execution." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "90175b45-5a1e-4159-8b87-dd1f27921ab6", + "metadata": {}, + "outputs": [], + "source": [ + "fnil_debug = ThresholdSignature(N, C, setup=dsa_setup, debug=True)" + ] + }, + { + "cell_type": "markdown", + "id": "9adfe856-753d-4161-8314-0ef766815d5b", + "metadata": {}, + "source": [ + "### ECDSA\n", + "\n", + "For ECDSA version, the can use a set of curves:" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "ca26a74a-e5d5-4c6d-99ca-3044140797b2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['P-192', 'P-224', 'P-256', 'P-384', 'P-521']" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ECDSASetup.supported_curves()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "cb2ea243-53c8-46cf-8e5a-b8a738aa9892", + "metadata": {}, + "outputs": [], + "source": [ + "ecdsa_setup = ECDSASetup(curve=\"P-256\")\n", + "ecnil = ThresholdSignature(N, C, setup=ecdsa_setup)" + ] + }, + { + "cell_type": "markdown", + "id": "06c6493c-bf6c-4747-b643-09706fadf683", + "metadata": {}, + "source": [ + "**Debug mode**" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "b772c05b-b000-497e-b48b-014803379e04", + "metadata": {}, + "outputs": [], + "source": [ + "ecnil_debug = ThresholdSignature(N, C, setup=ecdsa_setup, debug=True)" + ] + }, + { + "cell_type": "markdown", + "id": "41265ae9-4c9e-4b88-bc1e-d9a7b774ff0a", + "metadata": {}, + "source": [ + "# Threshold (EC)DSA \n", + "\n", + "In the following demo, we emulate a network. 
For this reason, communication is given as a simple API and the client in the network request the network to sign some message by introducing its `id`." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "90adde68-4233-4bfd-b0f9-3bbbcf7d28e9", + "metadata": {}, + "outputs": [], + "source": [ + "client_id = 1" + ] + }, + { + "cell_type": "markdown", + "id": "f693c33b-bd81-4b02-a4f7-dca265c3afa5", + "metadata": {}, + "source": [ + "## DSA\n", + "\n", + "We start by generating the secret key shared throughout the nodes. This only has to be done once." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "0b287f5d-3077-4a05-9d68-7cb36a0f21d0", + "metadata": {}, + "outputs": [], + "source": [ + "# DKG protocol for DSA\n", + "fnil.distributed_key_generation_protocol(client_id)" + ] + }, + { + "cell_type": "markdown", + "id": "9fa5cbcd-8b4a-4cb9-9304-953ab2b7d512", + "metadata": {}, + "source": [ + "Now, the client can ask the network to sign a message at their will without the network knowing the message." + ] + }, + { + "cell_type": "markdown", + "id": "536f666b-adb9-4594-82cd-ae932af4a010", + "metadata": {}, + "source": [ + "We can start the preprocessing phase before the message is defined." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "19150989-644e-4b86-9622-9c31fc1fa236", + "metadata": {}, + "outputs": [], + "source": [ + "# Preprocessing phase (message independent)\n", + "fnil.ts_prep_protocol(client_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "80bcd373-6945-4100-ab78-b565d1a1e35f", + "metadata": {}, + "outputs": [], + "source": [ + "message = \"Let me tell you a great secret about Nillion.\"" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "b2783d72-351e-4258-a7bb-407b23c01529", + "metadata": {}, + "outputs": [], + "source": [ + "# Online phase (message dependent)\n", + "fnil.ts_online_protocol(message, client_id)" + ] + }, + { + "cell_type": "markdown", + "id": "d3c2873d-d2b0-412c-b9f8-355d392c6981", + "metadata": {}, + "source": [ + "We can run the above two cells as many times as we want without having to run the DKG protocol from above." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "4996c8e7-577e-4a9b-a045-f78729dba715", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Client(id=1,\n", + " r=6202005962638382719598591827040686943912736840956149356327459726601,\n", + " s=18131665991815852492200116613897714960534471085827748460942091666925,\n", + " m=Let me tell you a great secret about Nillion.,\n", + " )\n" + ] + } + ], + "source": [ + "fnil.print_signature(client_id)" + ] + }, + { + "cell_type": "markdown", + "id": "1a702ef7-e514-4b4a-9824-7ca44b502d39", + "metadata": {}, + "source": [ + "#### Signature verification\n", + "\n", + "The protocol already has included a verification of the signature. 
However, we can also verify for ourselves:" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "cae841ea-ea6e-420d-891f-c34a6f8b55df", + "metadata": {}, + "outputs": [], + "source": [ + "# Signature\n", + "r, s, m = fnil.retrieve_signature(client_id)\n", + "# Public parameters (client's public key, dsa setup)\n", + "y = fnil.clients[client_id - 1].get_open(str(client_id)+\"th_client_x_pk\")\n", + "q = fnil.q\n", + "p = fnil.dsa.p\n", + "g = fnil.dsa.g\n", + "# Verify\n", + "verify_dsa_signature(message, r, s, y, p, q, g)" + ] + }, + { + "cell_type": "markdown", + "id": "31e99544-8d7e-45f9-a452-387eb54e882c", + "metadata": {}, + "source": [ + "## ECDSA\n", + "\n", + "Everything works similarly to the ECDSA version. " + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "03d48d0b-1b19-4e87-9423-e7ff35df7133", + "metadata": {}, + "outputs": [], + "source": [ + "# DKG protocol for ECDSA\n", + "ecnil.distributed_key_generation_protocol(client_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "8d206ddb-8670-49fa-aca0-1440b5d6f8f3", + "metadata": {}, + "outputs": [], + "source": [ + "# Preprocessing phase (message independent)\n", + "ecnil.ts_prep_protocol(client_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "23b19bd5-9ceb-46b0-a24c-ed5ec88bf005", + "metadata": {}, + "outputs": [], + "source": [ + "message = \"This is one of many releases we are going to have in the next few months.\"" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "6330a904-5111-4d22-a8ce-e05cb709408e", + "metadata": {}, + "outputs": [], + "source": [ + "# Online phase (message dependent)\n", + "ecnil.ts_online_protocol(message, client_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "28cb83d7-a931-4818-bb27-1701fd6a5f1f", + "metadata": {}, + "outputs": [], + "source": [ + "# Signature\n", + "r, s, m = ecnil.retrieve_signature(client_id)\n", + "# Public parameters (client's public key, ecdsa setup)\n", + "Y = ecnil.clients[client_id - 1].get_open(str(client_id)+\"th_client_x_pk\")\n", + "q = ecnil.q\n", + "G = ecnil.ecdsa.G\n", + "# Verify\n", + "verify_ecdsa_signature(message, r, s, Y, q, G)" + ] + }, + { + "cell_type": "markdown", + "id": "866f5b19-b238-4dc0-8b76-7679500e657f", + "metadata": {}, + "source": [ + "Let us see a case where the signature is not valid for some message. We give it a different message." + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "4f69041c-19b0-41d4-83ad-87f8118eda24", + "metadata": {}, + "outputs": [ + { + "ename": "VerifySignatureError", + "evalue": "Signature verification failed. Signature mismatch. 
Abort.", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mVerifySignatureError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[28], line 9\u001b[0m\n\u001b[1;32m 7\u001b[0m G \u001b[38;5;241m=\u001b[39m ecnil\u001b[38;5;241m.\u001b[39mecdsa\u001b[38;5;241m.\u001b[39mG\n\u001b[1;32m 8\u001b[0m \u001b[38;5;66;03m# Verify\u001b[39;00m\n\u001b[0;32m----> 9\u001b[0m \u001b[43mverify_ecdsa_signature\u001b[49m\u001b[43m(\u001b[49m\u001b[43mother_message\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mr\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43ms\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mY\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mq\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mG\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/.pyenv/versions/3.10.10/envs/ecdsa/lib/python3.10/site-packages/tbecdsa/utils.py:190\u001b[0m, in \u001b[0;36mverify_ecdsa_signature\u001b[0;34m(message, r, s, Y, q, G)\u001b[0m\n\u001b[1;32m 188\u001b[0m v \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mint\u001b[39m(V\u001b[38;5;241m.\u001b[39mx)\n\u001b[1;32m 189\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m v \u001b[38;5;241m!=\u001b[39m r:\n\u001b[0;32m--> 190\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m VerifySignatureError(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSignature mismatch. Abort.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", + "\u001b[0;31mVerifySignatureError\u001b[0m: Signature verification failed. Signature mismatch. Abort." + ] + } + ], + "source": [ + "other_message = \"So, are you ready for the Nillimania that is about to come?\"\n", + "# Signature\n", + "r, s, m = ecnil.retrieve_signature(client_id)\n", + "# Public parameters (client's public key, ecdsa setup)\n", + "Y = ecnil.clients[client_id - 1].get_open(str(client_id)+\"th_client_x_pk\")\n", + "q = ecnil.q\n", + "G = ecnil.ecdsa.G\n", + "# Verify\n", + "verify_ecdsa_signature(other_message, r, s, Y, q, G)" + ] + }, + { + "cell_type": "markdown", + "id": "8168b958-cb16-4d8a-a72e-0a734f41699d", + "metadata": {}, + "source": [ + "### Debugging\n", + "\n", + "We can also take a closer look into the network, in case we want to explore more. For that, we have available the print() function, which shows the elements owned by each party." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "39569617-19f9-4427-9db9-0c56e4f772b8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Network(N=3, q=115792089210356248762697446949407573529996955224135760342422259061068512044369,\n", + " nodes=[\n", + " Node(id=1,\n", + " shares_db={\n", + " 1th_client_x_enc_sh_exp: ,\n", + " 1th_client_m_lambda_exp_sh_exp: 18805008961205655853974338859205055368831044864697545644490465153571438711799,\n", + " 1th_client_signature_sh_base: 91779520132695752504598595459510400413067395296456213887423493859572385422122,\n", + " },\n", + " public_keys={\n", + " 0: ,\n", + " },\n", + " open_db={\n", + " 1th_client_x_sk: 102469784307885151344434448408108083163496555990607993030079296257768877177482,\n", + " 1th_client_x_pk: ,\n", + " 1th_client_k_r: 31753913312844769919847511464927299861472164513026404815794344435752523133091,\n", + " 1th_client_k_inv_sk: 99007062913722544630459242921033930766972195770224818402003123704021659134276,\n", + " 1th_client_gap_particle_m: 3105980911072955919053240646545559247084242804202022487995108928842220696094,\n", + " }\n", + " )\n", + " Node(id=2,\n", + " shares_db={\n", + " 1th_client_x_enc_sh_exp: ,\n", + " 1th_client_m_lambda_exp_sh_exp: 108606047286522208847578453720076266696012634649496781964928173924361506779525,\n", + " 1th_client_signature_sh_base: 14097041990847478370465638810235809055897373211739469599149103775486801032157,\n", + " },\n", + " public_keys={\n", + " 0: ,\n", + " },\n", + " open_db={\n", + " 1th_client_x_sk: 102469784307885151344434448408108083163496555990607993030079296257768877177482,\n", + " 1th_client_x_pk: ,\n", + " 1th_client_k_r: 31753913312844769919847511464927299861472164513026404815794344435752523133091,\n", + " 1th_client_k_inv_sk: 99007062913722544630459242921033930766972195770224818402003123704021659134276,\n", + " 1th_client_gap_particle_m: 3105980911072955919053240646545559247084242804202022487995108928842220696094,\n", + " }\n", + " )\n", + " Node(id=3,\n", + " shares_db={\n", + " 1th_client_x_enc_sh_exp: ,\n", + " 1th_client_m_lambda_exp_sh_exp: 79008080502481446358856101831037787726509978488153796354999416201748522695236,\n", + " 1th_client_signature_sh_base: 57889872377523500056536556248862903975761972917443548592733728878154084316939,\n", + " },\n", + " public_keys={\n", + " 0: ,\n", + " },\n", + " open_db={\n", + " 1th_client_x_sk: 102469784307885151344434448408108083163496555990607993030079296257768877177482,\n", + " 1th_client_x_pk: ,\n", + " 1th_client_k_r: 31753913312844769919847511464927299861472164513026404815794344435752523133091,\n", + " 1th_client_k_inv_sk: 99007062913722544630459242921033930766972195770224818402003123704021659134276,\n", + " 1th_client_gap_particle_m: 3105980911072955919053240646545559247084242804202022487995108928842220696094,\n", + " }\n", + " )\n", + " ]\n", + ")\n", + " clients=[\n", + " Client(id=1,\n", + " shares_db={\n", + " 1th_client_m_lambda_exp_sh_exp_node_1: 18805008961205655853974338859205055368831044864697545644490465153571438711799,\n", + " 1th_client_m_lambda_exp_sh_exp_node_2: 108606047286522208847578453720076266696012634649496781964928173924361506779525,\n", + " 1th_client_m_lambda_exp_sh_exp_node_3: 79008080502481446358856101831037787726509978488153796354999416201748522695236,\n", + " m_lambda_exp: 90627047539853062297711447460911536261356702778212363621995796218612956142192,\n", + " gap_lambda_exp: 
91551342479687100116633344204445557531301978216457851867025998830604586230946,\n", + " 1th_client_signature_sh_base_node_1: 91779520132695752504598595459510400413067395296456213887423493859572385422122,\n", + " 1th_client_signature_sh_base_node_2: 14097041990847478370465638810235809055897373211739469599149103775486801032157,\n", + " 1th_client_signature_sh_base_node_3: 57889872377523500056536556248862903975761972917443548592733728878154084316939,\n", + " sig_gap: 47974345290710482168903343569201539914729786201503471736884067452144758726849,\n", + " },\n", + " public_keys={\n", + " 0: ,\n", + " },\n", + " private_keys=>,\n", + " open_db={\n", + " 1th_client_x_pk: ,\n", + " 1th_client_k_r: 31753913312844769919847511464927299861472164513026404815794344435752523133091,\n", + " 1th_client_s: 45641033648832070136841209516832737108002048145377673994477584593492361851821,\n", + " 1th_client_message: This is one of many releases we are going to have in the next few months.,\n", + " }\n", + " )\n", + " ]\n", + ")\n" + ] + } + ], + "source": [ + "ecnil.print()" + ] + }, + { + "cell_type": "markdown", + "id": "cc9c6ed7-040e-4f88-91c3-32ea4a26a107", + "metadata": {}, + "source": [ + "In debug mode, we have access to all elements:" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "id": "29dcc5c5-2263-414f-a69e-07d492b62695", + "metadata": {}, + "outputs": [], + "source": [ + "# DKG protocol for ECDSA\n", + "ecnil_debug.distributed_key_generation_protocol(client_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "id": "c87dc2a5-ac98-4160-81cf-fcc64df10b49", + "metadata": {}, + "outputs": [], + "source": [ + "# Preprocessing phase (message independent)\n", + "ecnil_debug.ts_prep_protocol(client_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "id": "1277a72f-8618-4e53-809f-1ee1f108df19", + "metadata": {}, + "outputs": [], + "source": [ + "message = \"Stay tunned. Join the Telegram chat. Join the Discord channel. 
Be happy.\"" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "id": "9c52fc18-1be2-424a-af1e-9cb8a2548e7a", + "metadata": {}, + "outputs": [], + "source": [ + "# Online phase (message dependent)\n", + "ecnil_debug.ts_online_protocol(message, client_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "id": "b6c23905-4bcf-47ef-b707-431900d818de", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Network(N=3, q=115792089210356248762697446949407573529996955224135760342422259061068512044369,\n", + " nodes=[\n", + " Node(id=1,\n", + " shares_db={\n", + " randomsh_node_1: 26769440234656992175755532478958052042591858765515471507714033651589557161275,\n", + " randomsh_node_2: 101114896275779667892858842630820945984339520389178341454003745582001833565950,\n", + " randomsh_node_3: 65295368212334320364822732577293115580823338745394808319313401203985502047244,\n", + " random_minus_1th_client_x_sh_exp: 21285557804254400517499316140863911282953027731008870766837031342101421986995,\n", + " 1th_client_x_enc_sh_exp: ,\n", + " 1th_client_k_lambda_sh_exp: 64808101058331855648098966258074768495522068627855889258210392032045767117789,\n", + " 1th_client_k_lambda_sh_base: 45914160945743234463835765639491558729542954125290225663180923306814251949178,\n", + " 1th_client_lambda_1_lambda_sh_exp: 110261921268401980199777795756244310703391358903969707276878833519586775656461,\n", + " 1th_client_lambda_1_lambda_sh_base: 108264603285536304734821164728289517992500267790392063330713261312757662392280,\n", + " 1th_client_lambda_2_lambda_sh_exp: 28987819239560065081237892375136555651251977493306094401896942309427202983421,\n", + " 1th_client_lambda_2_lambda_sh_base: 393993427082182993888611869452796442059787351768810467093084945818453065664,\n", + " 1th_client_k_inv_lambda_sh_exp: 50983988152024393114598480691332805034474886596279871084211867029022744926579,\n", + " 1th_client_m_lambda_exp_sh_exp: 59277933116377587085179315064911505668916472307689836192666966490564030729882,\n", + " 1th_client_k_inv_minus_lambda_2_sh_exp: 21996168912464328033360588316196249383222909102973776682314924719595541943158,\n", + " 1th_client_signature_sh_base: 58525477711371260240334972186282972087472544951891672017409127407100153430357,\n", + " },\n", + " public_keys={\n", + " 0: ,\n", + " },\n", + " open_db={\n", + " pow_share_node_1: 72378868255854391565760175650872293318597189536572400527922097502433058411878,\n", + " pow_share_node_2: 45872515933530664101602257524600538219508489031264254994253819321752776775816,\n", + " pow_share_node_3: 29014508172579319062670438431715762317330725523345422883913336857780815595948,\n", + " 1th_client_x_sk: 64217921291331773899722626677277272882064119440032325106726282285652751432437,\n", + " ec_pow_share_node_1: ,\n", + " ec_pow_share_node_2: ,\n", + " ec_pow_share_node_3: ,\n", + " 1th_client_x_pre_pk: ,\n", + " 1th_client_x_pk: ,\n", + " 1th_client_k_sk: 106690866310991489943153000702872351126038800689406262426052359671490366155073,\n", + " 1th_client_k_r: 63311173584842841245925798104795648111620391687666830997321134344588083074523,\n", + " 1th_client_k_inv_sk: 107906334498891160288150417749665075810783183991392731713096401950578281812997,\n", + " 1th_client_gap_particle_m: 69322805609917834763653403789850653945489715737259872208282176384775167087218,\n", + " }\n", + " )\n", + " Node(id=2,\n", + " shares_db={\n", + " randomsh_node_1: 72655457485939389274262646238908281476957104058738201607336845534626284859858,\n", + " 
randomsh_node_2: 55421664265852990662402318574735773655011024051024860751083583489548130800173,\n", + " randomsh_node_3: 44198230652869082014345749838424845600089020503892065861360713945075170694589,\n", + " random_minus_1th_client_x_sh_exp: 17978783689643860262869736622490840897246670992032037432728437473306693866471,\n", + " 1th_client_x_enc_sh_exp: ,\n", + " 1th_client_k_lambda_sh_exp: 109046234982584279344320108345538172606301505649245143407617166312984970462065,\n", + " 1th_client_k_lambda_sh_base: 16041270280715525560014858200313600029015059473186017401036961446635721672877,\n", + " 1th_client_lambda_1_lambda_sh_exp: 14637587812657205465089060314861794398045935656251157915659263317920692221790,\n", + " 1th_client_lambda_1_lambda_sh_base: 26927290265932762621234385455146593833467909324594318258475853624783121420370,\n", + " 1th_client_lambda_2_lambda_sh_exp: 111387604221397619557478295762098362007647046471814770560600604854995628920443,\n", + " 1th_client_lambda_2_lambda_sh_base: 15648644956979592409131495285480833237606061740472442534334164023606449679144,\n", + " 1th_client_k_inv_lambda_sh_exp: 6745854227771969418377338603869400923695449574890616934805092748083541582303,\n", + " 1th_client_m_lambda_exp_sh_exp: 7891733584885236046711721710992393474350486081360540980854170569837150639487,\n", + " 1th_client_k_inv_minus_lambda_2_sh_exp: 11150339216730598623596489791178612446045358327211606716626746954156424706228,\n", + " 1th_client_signature_sh_base: 99130481358755645570913642446332077523645332139473716675869609686163579526448,\n", + " },\n", + " public_keys={\n", + " 0: ,\n", + " },\n", + " open_db={\n", + " pow_share_node_1: 72378868255854391565760175650872293318597189536572400527922097502433058411878,\n", + " pow_share_node_2: 45872515933530664101602257524600538219508489031264254994253819321752776775816,\n", + " pow_share_node_3: 29014508172579319062670438431715762317330725523345422883913336857780815595948,\n", + " 1th_client_x_sk: 64217921291331773899722626677277272882064119440032325106726282285652751432437,\n", + " ec_pow_share_node_1: ,\n", + " ec_pow_share_node_2: ,\n", + " ec_pow_share_node_3: ,\n", + " 1th_client_x_pre_pk: ,\n", + " 1th_client_x_pk: ,\n", + " 1th_client_k_sk: 106690866310991489943153000702872351126038800689406262426052359671490366155073,\n", + " 1th_client_k_r: 63311173584842841245925798104795648111620391687666830997321134344588083074523,\n", + " 1th_client_k_inv_sk: 107906334498891160288150417749665075810783183991392731713096401950578281812997,\n", + " 1th_client_gap_particle_m: 69322805609917834763653403789850653945489715737259872208282176384775167087218,\n", + " }\n", + " )\n", + " Node(id=3,\n", + " shares_db={\n", + " randomsh_node_1: 80467104596292784995039830657604396254463789914559555668663681347644787618471,\n", + " randomsh_node_2: 21701464042755607759296886007989738374567587485349163154590663265294001864960,\n", + " randomsh_node_3: 100601572627545979661556637006394789708817365091180024552239177876511754079724,\n", + " random_minus_1th_client_x_sh_exp: 16317407426751689361152032901068957344284849244363366938662721251946077534328,\n", + " 1th_client_x_enc_sh_exp: ,\n", + " 1th_client_k_lambda_sh_exp: 67687894616425587027423848566794623807840017597405412684236998307161151652932,\n", + " 1th_client_k_lambda_sh_base: 27833039705320897040551638431985464923603433345228226403929657914049433624966,\n", + " 1th_client_lambda_1_lambda_sh_exp: 17981540287082509631932088256043972474997713697789597119888467700607069813447,\n", + " 
1th_client_lambda_1_lambda_sh_base: 2949600072862649594898126130838370818644756422341793042150301207517336252843,\n", + " 1th_client_lambda_2_lambda_sh_exp: 89970861924037096172137585573787705605377182569231545460797204963324757607552,\n", + " 1th_client_lambda_2_lambda_sh_base: 114532674433452481534518996941820426545049999443331494478889058485967324359656,\n", + " 1th_client_k_inv_lambda_sh_exp: 48104194593930661735273598382612949722156937626730347658185260753907360391436,\n", + " 1th_client_m_lambda_exp_sh_exp: 85669434903508096659355936822838596282837731295195009804125466007768221466379,\n", + " 1th_client_k_inv_minus_lambda_2_sh_exp: 73925421880249814325833459758232817646776710281634562539810314851651114828252,\n", + " 1th_client_signature_sh_base: 26294689748181801575956195524248545264537826091618411177415420384036597551421,\n", + " },\n", + " public_keys={\n", + " 0: ,\n", + " },\n", + " open_db={\n", + " pow_share_node_1: 72378868255854391565760175650872293318597189536572400527922097502433058411878,\n", + " pow_share_node_2: 45872515933530664101602257524600538219508489031264254994253819321752776775816,\n", + " pow_share_node_3: 29014508172579319062670438431715762317330725523345422883913336857780815595948,\n", + " 1th_client_x_sk: 64217921291331773899722626677277272882064119440032325106726282285652751432437,\n", + " ec_pow_share_node_1: ,\n", + " ec_pow_share_node_2: ,\n", + " ec_pow_share_node_3: ,\n", + " 1th_client_x_pre_pk: ,\n", + " 1th_client_x_pk: ,\n", + " 1th_client_k_sk: 106690866310991489943153000702872351126038800689406262426052359671490366155073,\n", + " 1th_client_k_r: 63311173584842841245925798104795648111620391687666830997321134344588083074523,\n", + " 1th_client_k_inv_sk: 107906334498891160288150417749665075810783183991392731713096401950578281812997,\n", + " 1th_client_gap_particle_m: 69322805609917834763653403789850653945489715737259872208282176384775167087218,\n", + " }\n", + " )\n", + " ]\n", + ")\n", + " clients=[\n", + " Client(id=1,\n", + " shares_db={\n", + " 1th_client_m_lambda_exp_sh_exp_node_1: 59277933116377587085179315064911505668916472307689836192666966490564030729882,\n", + " 1th_client_m_lambda_exp_sh_exp_node_2: 7891733584885236046711721710992393474350486081360540980854170569837150639487,\n", + " 1th_client_m_lambda_exp_sh_exp_node_3: 85669434903508096659355936822838596282837731295195009804125466007768221466379,\n", + " m_lambda_exp: 37047012394414671028549526649334921896107734460109626635224344007100890791380,\n", + " 1th_client_gap_lambda_enc_sh_exp_node_1: ,\n", + " 1th_client_gap_lambda_enc_sh_exp_node_2: ,\n", + " 1th_client_gap_lambda_enc_sh_exp_node_3: ,\n", + " gap_lambda_exp: 14593568459178450201196623351708437273742502624114583187528081246402839799107,\n", + " 1th_client_signature_sh_base_node_1: 58525477711371260240334972186282972087472544951891672017409127407100153430357,\n", + " 1th_client_signature_sh_base_node_2: 99130481358755645570913642446332077523645332139473716675869609686163579526448,\n", + " 1th_client_signature_sh_base_node_3: 26294689748181801575956195524248545264537826091618411177415420384036597551421,\n", + " sig_gap: 68158559607952458624507363207456021345658747958848039528271898416231818463857,\n", + " },\n", + " public_keys={\n", + " 0: ,\n", + " },\n", + " private_keys=>,\n", + " open_db={\n", + " 1th_client_x_pk: ,\n", + " 1th_client_k_r: 63311173584842841245925798104795648111620391687666830997321134344588083074523,\n", + " 1th_client_s: 63833539223658105135053461039650923566058911866233596880424355717202875082925,\n", + 
" 1th_client_message: Stay tunned. Join the Telegram chat. Join the Discord channel. Be happy.,\n", + " }\n", + " )\n", + " ]\n", + ")\n" + ] + } + ], + "source": [ + "ecnil_debug.print()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5b0a2d28-64fb-4f04-b339-a733b7996138", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.10" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..ef071cd --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,21 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + rm _source/modules.rst + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/_source/tecdsa.rst b/docs/_source/tecdsa.rst new file mode 100644 index 0000000..ef57dce --- /dev/null +++ b/docs/_source/tecdsa.rst @@ -0,0 +1,26 @@ +tecdsa module +============= + + +.. automodule:: tecdsa.network + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: tecdsa.setup + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: tecdsa.tecdsa + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: tecdsa.utils + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/_templates/module.rst_t b/docs/_templates/module.rst_t new file mode 100644 index 0000000..2d264b2 --- /dev/null +++ b/docs/_templates/module.rst_t @@ -0,0 +1,8 @@ +{%- if show_headings %} +{{- [basename, "module"] | join(' ') | e | heading }} + +{% endif -%} +.. automodule:: {{ qualname }} +{%- for option in automodule_options %} + :{{ option }}: +{%- endfor %} \ No newline at end of file diff --git a/docs/_templates/package.rst_t b/docs/_templates/package.rst_t new file mode 100644 index 0000000..e77b7d3 --- /dev/null +++ b/docs/_templates/package.rst_t @@ -0,0 +1,36 @@ +{%- macro automodule(modname, options) -%} +.. automodule:: {{ modname }} +{%- for option in options %} + :{{ option }}: +{%- endfor %} +{%- endmacro %} + +{%- macro toctree(docnames) -%} +.. 
toctree:: + :maxdepth: {{ maxdepth }} +{% for docname in docnames %} + {{ docname }} +{%- endfor %} +{%- endmacro %} + +{{- [pkgname, "module"] | join(" ") | e | heading }} + +{%- if subpackages %} +Subpackages +----------- + +{{ toctree(subpackages) }} +{% endif %} + +{%- if submodules %} +{% if separatemodules %} +{{ toctree(submodules) }} +{% else %} +{%- for submodule in submodules %} +{% if show_headings %} +{{- [submodule, "module"] | join(" ") | e | heading(2) }} +{% endif %} +{{ automodule(submodule, automodule_options) }} +{% endfor %} +{%- endif %} +{%- endif %} \ No newline at end of file diff --git a/docs/_templates/toc.rst_t b/docs/_templates/toc.rst_t new file mode 100644 index 0000000..9ed1320 --- /dev/null +++ b/docs/_templates/toc.rst_t @@ -0,0 +1,7 @@ +{{ header | heading }} + +.. toctree:: + :maxdepth: {{ maxdepth }} +{% for docname in docnames %} + {{ docname }} +{%- endfor %} \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..a517339 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,99 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +sys.path.insert(0, os.path.abspath('../src')) # Prioritize local module copy. + + +# -- Project information ----------------------------------------------------- + +# The name and version are retrieved from ``pyproject.toml`` in the root +# directory. +import toml +with open('../pyproject.toml') as pyproject_file: + pyproject_data = toml.load(pyproject_file) +project = pyproject_data['project']['name'] +version = pyproject_data['project']['version'] +release = version + +# The copyright year and holder information is retrieved from the +# ``LICENSE`` file. +import re +with open('../LICENSE', 'r') as license_file: + license_string = license_file.read().split('Copyright (c) ')[1] +year = license_string[:4] +author = license_string[5:].split('\n')[0] +copyright = year + ', ' + re.sub(r"\.$", "", author) # Period already in HTML. + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.napoleon', + 'sphinx.ext.intersphinx', + 'sphinx.ext.viewcode', + 'sphinx_autodoc_typehints' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build'] + +# Options to configure autodoc extension behavior. 
+autodoc_member_order = 'bysource' +autodoc_typehints = 'description' +autodoc_typehints_description_target = 'documented' +autodoc_preserve_defaults = True + +# Allow references/links to definitions found in the Python documentation +# and in the documentation for this package's dependencies. + +def rtd_url_for_installed_version(name, subdomain=None): + subdomain = name if subdomain is None else subdomain + prefix = 'https://' + subdomain + '.readthedocs.io/en/' + + if sys.version_info.major == 3 and sys.version_info.minor == 7: + import pkg_resources + return prefix + pkg_resources.get_distribution(name).version + + import importlib.metadata + return prefix + importlib.metadata.version(name) + +intersphinx_mapping = { + 'python': ('https://docs.python.org/3', None) +} + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Theme options for Read the Docs. +html_theme_options = { + 'display_version': True, + 'collapse_navigation': True, + 'navigation_depth': 1, + 'titles_only': True +} diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..725c23d --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,10 @@ +.. tecdsa documentation master file, created by + sphinx-quickstart on Sat Nov 4 13:05:19 2023. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + + +.. include:: ../README.rst + +.. include:: toc.rst + diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..f7c13ef --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,36 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +del _source\modules.rst +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/toc.rst b/docs/toc.rst new file mode 100644 index 0000000..56030bd --- /dev/null +++ b/docs/toc.rst @@ -0,0 +1,5 @@ +.. toctree:: + :maxdepth: 4 + :caption: Contents: + + _source/tecdsa \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..bf20ff9 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,61 @@ +[project] +name = "tecdsa" +version = "0.1.0" +description = """\ + Pure-Python implementation of a threshold ecdsa signature scheme \ + based on a secure multi-party computation (MPC) protocol for evaluating \ + arithmetic sum-of-products expressions via a non-interactive computation phase. 
\ + """ +license = {text = "MIT"} +authors = [ + {name = "Nillion"}, + {email = "engineering@nillion.com"} +] +readme = "README.rst" +requires-python = ">=3.10" +dependencies = [ + "gmpy2~=2.1.5", + "llvmlite~=0.41.1", + "mpmath~=1.3.0", + "numba~=0.58.1", + "numpy~=1.25.2", + "phe~=1.5.0", + "pycryptodome~=3.19.0", + "sympy~=1.12", + "typing_extensions~=4.8.0" +] + +[project.urls] +Repository = "https://github.com/nillion-oss/tecdsa" +Documentation = "https://tecdsa.readthedocs.io" + +[project.optional-dependencies] +docs = [ + "toml~=0.10.2", + "sphinx~=4.2.0", + "sphinx-rtd-theme~=1.0.0", + "sphinx-autodoc-typehints~=1.12.0" +] +test = [ + "pytest~=7.2", + "pytest-cov~=4.0" +] +lint = [ + "pylint~=2.17.0" +] +coveralls = [ + "coveralls~=3.3.1" +] +publish = [ + "build~=0.10", + "twine~=4.0" +] + +[build-system] +requires = [ + "setuptools>=68.0" +] +build-backend = "setuptools.build_meta" + +[tool.pytest.ini_options] +addopts = "--doctest-modules --ignore=docs --cov=tecdsa --cov-report term-missing" \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..16f8c2a --- /dev/null +++ b/requirements.txt @@ -0,0 +1,9 @@ +gmpy2==2.1.5 +llvmlite==0.41.1 +mpmath==1.3.0 +numba==0.58.1 +numpy==1.25.2 +phe==1.5.0 +pycryptodome==3.19.0 +sympy==1.12 +typing_extensions==4.8.0 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..2cc5c8a --- /dev/null +++ b/setup.cfg @@ -0,0 +1,4 @@ +[nosetests] +exe=True +with-doctest=1 +tests=test/ \ No newline at end of file diff --git a/src/tecdsa/__init__.py b/src/tecdsa/__init__.py new file mode 100644 index 0000000..04eb6b9 --- /dev/null +++ b/src/tecdsa/__init__.py @@ -0,0 +1,4 @@ +"""Allow the users to access the class and functions directly""" +from tecdsa.tecdsa import ThresholdSignature +from tecdsa.setup import DSASetup, ECDSASetup +from tecdsa.utils import verify_ecdsa_signature, verify_dsa_signature diff --git a/src/tecdsa/network.py b/src/tecdsa/network.py new file mode 100644 index 0000000..e9ffcd9 --- /dev/null +++ b/src/tecdsa/network.py @@ -0,0 +1,161 @@ +from dataclasses import dataclass, field +from typing import Dict, List, Union + +from .utils import add, generate_additive_shares + +@dataclass +class Node: + """ Represents a node in the network.""" + + id: int + """Identifier for the node.""" + shares_db: Dict[str, int] = field(default_factory=dict) + """Database for holding shares.""" + open_db: Dict[str, int] = field(default_factory=dict) + """Database for holding open values.""" + he_public_keys: Dict[int, int] = field(default_factory=dict) + """Dictionary for holding homomorphic encryption public keys.""" + + def get_share(self, label: str) -> None: + """Retrieve a share from the 'shares_db'.""" + return self.shares_db[label] + + def get_open(self, label: str) -> None: + """Retrieve an open value from the 'open_db'.""" + return self.open_db[label] + + def set_share(self, value, label: str) -> None: + """Set a share in the 'shares_db'.""" + self.shares_db[label] = value + + def set_open(self, value, label: str) -> None: + """Set an open value in the 'open_db'.""" + self.open_db[label] = value + + def delete_share(self, label: str) -> None: + """Delete a share from the 'shares_db'.""" + self.shares_db.pop(label) + + def delete_open(self, label: str) -> None: + """Delete an open value from the 'open_db'.""" + self.open_db.pop(label) + +@dataclass +class Client(Node): + """Represents a client node in the network, inheriting from the 'Node' class.""" + he_private_key: int = 
field(default=0) + +class Network: + """Represents a network of nodes and clients. + + Manages the interactions and cryptographic operations within the network, + including sharing secrets, broadcasting values, and reconstructing shared values. + """ + + nodes: List[Node] + """List of nodes in the network.""" + clients: List[Client] + """List of clients in the network.""" + q: int + """Prime field.""" + h: int + """Multiplicative field generator.""" + + def __init__(self, N, q, h=2, C=1): + """ + Initialize the network with 'N' nodes, prime field 'q', field generator 'h', and 'C' clients. + + Parameters: + N (int): Number of nodes in the network. + q (int): Prime field. + h (int): Multiplicative field generator (default is 2). + C (int): Number of clients in the network (default is 1). + """ + self.nodes = [Node(i+1) for i in range(N)] + self.clients = [Client(i+1) for i in range(C)] + self.N = N + self.q = q + self.h = h + + def print(self): + """Print a readable representation of the network, including nodes and clients with their databases.""" + print(f"Network(N={len(self.nodes)}, q={self.q},") + print(" nodes=[") + for node in self.nodes: + print(f" Node(id={node.id},") + print(" shares_db={") + for key, value in node.shares_db.items(): + print(f" {key}: {value},") + print(" },") + print(" public_keys={") + for key, value in node.he_public_keys.items(): + print(f" {key}: {value},") + print(" },") + print(" open_db={") + for key, value in node.open_db.items(): + print(f" {key}: {value},") + print(" }") + print(" )") + print(" ]\n)") + print(" clients=[") + for client in self.clients: + print(f" Client(id={client.id},") + print(" shares_db={") + for key, value in client.shares_db.items(): + print(f" {key}: {value},") + print(" },") + print(" public_keys={") + for key, value in client.he_public_keys.items(): + print(f" {key}: {value},") + print(" },") + print(f" private_keys={client.he_private_key},") + print(" open_db={") + for key, value in client.open_db.items(): + print(f" {key}: {value},") + print(" }") + print(" )") + print(" ]\n)") + + def reconstruct_local(self, type_share: str, get_label: str, save_label: str, party: Union[Client, Node]) -> None: + """Locally reconstruct exponent share ('exp') or base ('base') shared value.""" + + type_label = "_sh_exp" if type_share == "exp" else "_sh_base" + p = (self.q - 1) if type_share == "exp" else self.q + shares = [party.get_share(get_label+type_label+"_node_"+str(node.id)) for node in self.nodes] + reconstructed = add(shares, p) + party.set_share(reconstructed, save_label) + + def broadcast(self, element: int, label: str) -> None: + """Send element to all nodes.""" + + for node in self.nodes: + node.open_db[label] = element + + def send(self, type_share: str, label: str, party: Union[Client, Node], delete=False) -> None: + """Send exponent ('exp') or base ('base') share to party.""" + + type_label = "_sh_exp" if type_share == "exp" else "_sh_base" + for node in self.nodes: + sh_node = node.get_share(label+type_label) + sh_label = label+type_label+"_node_"+str(node.id) + party.set_share(sh_node, sh_label) + node.delete_share(label+type_label) if delete else None + + def share(self, secret: int, size: int, label: str) -> None: + """Share secret value with all""" + + shares = generate_additive_shares(secret, self.N, size) + for node in self.nodes: + node.set_share(shares[node.id - 1], label) + + def reveal(self, type_share: str, get_label: str, save_label: str, party: Union[Client, Node]) -> None: + """Send exponent ('exp') or base 
('base') share to party.""" + + self.send(type_share, get_label, party) + self.reconstruct_local(type_share, get_label, save_label, party) + + + + + + diff --git a/src/tecdsa/setup.py b/src/tecdsa/setup.py new file mode 100644 index 0000000..13b77e1 --- /dev/null +++ b/src/tecdsa/setup.py @@ -0,0 +1,123 @@ + +from sympy.ntheory.residue_ntheory import primitive_root +from Crypto.PublicKey import DSA +from Crypto.PublicKey import ECC +from Crypto.PublicKey.ECC import EccPoint +from dataclasses import dataclass +from typing import Optional + + +def get_generator(q): + """ + Get the generator (primitive root) for a given prime number q. + + Parameters: + q (int): A prime number for which the generator is needed. + + Returns: + int: The generator (primitive root) for the given prime number. + + Example: + >>> get_generator(23) + 5 + """ + return int(primitive_root(q)) + +@dataclass +class DSASetup: + """ + Dataclass representing a DSA (Digital Signature Algorithm) setup. + + Example: + setup = DSASetup.generate_dsa_setup() + """ + + p: int + """The DSA modulus.""" + q: int + """The order of the subgroup.""" + g: int + """A generator of the subgroup.""" + h: int + """A generator of the field :math:`\mathbb{Z}_q`.""" + + def generate_dsa_setup(): + """Generate a DSA setup based on system parameters.""" + key = DSA.generate(2048) + g = int(key._key['g']) + p = int(key._key['p']) + q = int(key._key['q']) + h = get_generator(q) + return DSASetup(p, q, g, h) + +@dataclass +class ECDSASetup: + """ + Dataclass representing an ECDSA (Elliptic Curve Digital Signature Algorithm) setup. + + Example: + setup = ECDSASetup.generate_ecdsa_setup() + """ + + curve: str + """The name of the elliptic curve.""" + p: Optional[int] = None + """The finite field of the elliptic curve.""" + q: Optional[int] = None + """The order of the elliptic curve group.""" + G: Optional[EccPoint] = None + """A base point on the elliptic curve.""" + h: Optional[int] = None + """A generator of field :math:`\mathbb{Z}_q`.""" + + def generate_ecdsa_setup(self): + """ + Generate an ECDSA setup for the specified elliptic curve. + + Returns: + ECDSASetup: An instance of ECDSASetup with generated parameters. + + Raises: + ValueError: If the specified curve is not supported. + + Example: + >>> setup = ECDSASetup(curve='P-256').generate_ecdsa_setup() + """ + + supported_curves = self.supported_curves() + curve = self.curve + if curve not in supported_curves: + raise ValueError("{} is not one of the specified curves. \ + Please choose one of the following curves:\n \ + ['P-192', 'P-224', 'P-256', 'P-384', 'P-521']".format(curve)) + p = int(ECC._curves[curve].p) + q = int(ECC._curves[curve].order) + G = ECC._curves[curve].G + h = get_generator(int(q)) + return ECDSASetup(curve, p, q, G, h) + + @staticmethod + def supported_curves(): + """ + Get a list of supported elliptic curves. + + Returns: + List[str]: A list of supported elliptic curve names. + + Example: + >>> supported_curves = ECDSASetup.supported_curves() + >>> print(supported_curves) + ['P-192', 'P-224', 'P-256', 'P-384', 'P-521'] + """ + + return ['P-192', 'P-224', 'P-256', 'P-384', 'P-521'] + + def print_supported_curves(self): + """ + Print the list of supported elliptic curves. 
+ """ + + supported_curves = self.supported_curves() + print("Supported Elliptic Curves: ", supported_curves) + + diff --git a/src/tecdsa/tecdsa.py b/src/tecdsa/tecdsa.py new file mode 100644 index 0000000..e939ce8 --- /dev/null +++ b/src/tecdsa/tecdsa.py @@ -0,0 +1,751 @@ +from Crypto.Hash import SHA256 +from phe import paillier +from typing import List + +from .utils import add, add_ec, multiply, rand, egcd, verify_dsa_signature, verify_ecdsa_signature +from .setup import DSASetup, ECDSASetup +from .network import Network, Client + +class ThresholdSignature(Network): + clients: List[Client] + + def __init__(self, N, C, setup=None, debug=False): + + self.debug = debug + if setup is None: + self.dsa = DSASetup.generate_dsa_setup() + self.setup = DSASetup + super().__init__(N, self.dsa.q, self.dsa.h) + elif type(setup) == DSASetup: + self.dsa = setup + self.setup = DSASetup + super().__init__(N, self.dsa.q, self.dsa.h) + elif type(setup) == ECDSASetup: + self.ecdsa = setup.generate_ecdsa_setup() + self.setup = ECDSASetup + super().__init__(N, self.ecdsa.q, self.ecdsa.h) + else: + raise TypeError("Invalid type provided. " + "Please use either 'DSASetup' or 'ECDSASetup' types." + ) + + # Generate public and private keys for the paillier homomorphic encryption scheme + for i in range(C): + pub_key, priv_key = paillier.generate_paillier_keypair() + self.clients[i].he_private_key = priv_key + for node in self.nodes: + node.he_public_keys[i] = pub_key + for client in self.clients: + client.he_public_keys[i] = pub_key + + + def get_lambda(self, labels: list[str]) -> None: + """ + Emulates the generation of LAMBDA pairs :math:`([h^{\gamma}], [\gamma])` between all nodes. + + Parameters: + labels (list[str]): A list of labels for which lambda values will be generated + and stored. + + Returns: + None + """ + + n = len(labels) + h = self.h + q = self.q + q_minus_one = q - 1 + for l in range(n): + # Locally generate lambda + alpha = rand(q_minus_one) + h_alpha = pow(h, alpha, q) + + self.share(alpha, q_minus_one, labels[l]+"_lambda_sh_exp") + self.share(h_alpha, q, labels[l]+"_lambda_sh_base") + + def rss_protocol(self, size: int, label: str) -> None: + """ + Random Secret Sharing (RSS) Protocol. + + This function implements a one-round RSS protocol. The goal is to share a random + secret value among a group of nodes using a specific label for the shares. + + Parameters: + size (int): The maximum size of the random secret to be generated and shared. + label (str): A label to identify the shared secrets and their associated operations. + + Returns: + None + """ + + # Round 1 + for node in self.nodes: + # Step 1: locally generate random secret + random_element = rand(size) + # Step 2: share random secret with all nodes + self.share(random_element, size, label+"sh_node_"+str(node.id)) + # All local + for node in self.nodes: + # DB management + list_of_shares = [ + node.get_share(label + "sh_node_" + str(other_node.id)) + for other_node in self.nodes + ] + # Step 3: add locally all shares + random_sum = add(list_of_shares, size) + # DB management + sh_label = label+"_sh_exp" + node.set_share(random_sum, sh_label) + if not self.debug: + [node.delete_share(label + "sh_node_" + str(other_node.id)) + for other_node in self.nodes] + + def pow_share_protocol(self, base_type: str, get_label: str, save_label: str) -> None: + """ + Compute a power-sharing protocol among a group of nodes. 
+ + This function implements a one-round protocol to securely compute :math:`b^{s}` where + the exponent is a secret shared element between the nodes. + + Parameters: + base_type (str): The type of base used: 'exp', when base to be used is self.h; + 'base', when the base to be used is self.dsa.g. Note: 'base' + option can only be use for the DSA setup. + get_label (str): The label to retrieve shares of 's' from nodes. + save_label (str): The label to save the final result to. + + Returns: + None + """ + + if base_type not in ["exp", "base"]: + raise ValueError("{} is not one of the specified base types.\ + Please choose one of the following:\n \ + ['exp', 'base']".format(base_type)) + + prime = self.q if base_type == "exp" else self.dsa.p + + # Round 1 + for node in self.nodes: + # DB management + exponent = node.get_share(get_label+"_sh_"+base_type) + # Step 1: compute base^share + if base_type == "exp": + h_exp = pow(self.h, exponent, prime) + else: + h_exp = pow(self.dsa.g, exponent, prime) + # Step 2: Broadcast base^share to nodes + self.broadcast(h_exp, "pow_share_node_"+str(node.id)) + + # All local + for node in self.nodes: + # DB management + base_exps = [ + node.get_open("pow_share_node_"+str(other_node.id)) + for other_node in self.nodes + ] + # Step 3: multiply locally all powers of shares + val = multiply(base_exps, prime) + # DB management + node.set_open(val, save_label) + if not self.debug: + [node.delete_open("pow_share_node_"+str(other_node.id)) + for other_node in self.nodes] + + def ec_pow_share_protocol(self, get_label: str, save_label: str) -> None: + """ + Execute an elliptic curve (EC) version of power-sharing protocol. + + This function implements a one-round protocol to securely compute + :math:`scalar\cdot G` where the scalar is a secret shared element between the nodes. + + Parameters: + get_label (str): The label used to retrieve scalar shares from nodes. + save_label (str): The label used to save the result of the power-sharing protocol. + + Returns: + None + """ + + # Round 1 + for node in self.nodes: + # DB management + scalar_sh = node.get_share(get_label+"_sh_base") + # Step 1: + sh_G = scalar_sh * self.ecdsa.G + # Step 2: + self.broadcast(sh_G, "ec_pow_share_node_"+str(node.id)) + + # All local + for node in self.nodes: + # DB management + base_exps = [ + node.get_open("ec_pow_share_node_"+str(other_node.id)) + for other_node in self.nodes + ] + # Step 3: add locally all point shares + val = add_ec(base_exps) + # DB management + node.set_open(val, save_label) + if not self.debug: + [node.delete_open("ec_pow_share_node_"+str(other_node.id)) + for other_node in self.nodes] + + def subtract_exp_shares_local(self, label_a: str, label_b: str, label_r: str) -> None: + """ + Subtract the shares of the exponent of two labels and store the result in another label. + + Parameters: + label_a (str): The label for the first operand. + label_b (str): The label for the second operand. + label_r (str): The label where the result is stored. 
+ + Returns: + None + """ + + q_minus_one = self.q - 1 + + for node in self.nodes: + # DB management + share_a = node.get_share(label_a+"_sh_exp") + share_b = node.get_share(label_b+"_sh_exp") + # Local operation: subtraction + share_r = (share_a - share_b) % q_minus_one + # DB management + label = label_r+"_sh_exp" + node.set_share(share_r, label) + + def pow_local(self, label_base: str, label_exponent: str, label_result: str) -> None: + """ + Compute the power of a base saved in open database raised to an exponent and store the result. + + Parameters: + label_base (str): The label for the base. + label_exponent (str): The label for the exponent. + label_result (str): The label for the element where the result is stored. + + Returns: + None + """ + + for node in self.nodes: + # DB management + base = node.get_open(label_base) + exponent = node.get_open(label_exponent) + # Local operation: power + result = pow(base, exponent, self.dsa.p) + # DB management + node.set_open(result, label_result) + + def key_agreement_protocol(self, label: str, delete=True) -> None: + """ + Perform a key agreement protocol to derive a mask of the secret key and the + corresponding public key. + + Parameters: + label (str): The label of the pair associated with the secret key mask. + delete (bool, optional): Whether to delete intermediate data after the protocol. + Defaults to True. + + Returns: + None + """ + + q_minus_one = self.q - 1 + + # Round 1 + # Step 1: + random_label = "random" + self.rss_protocol(q_minus_one, random_label) + + # Round 2 + # Step 2: + random_minus_label = random_label + "_minus_" + label + self.subtract_exp_shares_local(random_label, label + "_lambda", random_minus_label) + base_type_exp = "exp" + self.pow_share_protocol(base_type_exp, random_minus_label, label + "_sk") + + if self.setup == DSASetup: + # Step 3: + base_type_base = "base" + self.pow_share_protocol(base_type_base, label + "_lambda", label + "_pre_pk") + # Step 4: + self.pow_local(label + "_pre_pk", label + "_sk", label + "_pk") + else: + # Step 3: + self.ec_pow_share_protocol(label + "_lambda", label + "_pre_pk") + # Step 4: + self.ec_mult_local(label + "_pre_pk", label + "_sk", label + "_pk") + + # DB management + ## Option only for testing purposes + if delete: + [node.delete_share(random_minus_label+"_sh_exp") for node in self.nodes] + [node.delete_share(random_label+"_sh_exp") for node in self.nodes] + [node.delete_open(label + "_pre_pk") for node in self.nodes] + + def ec_mult_local(self, label_ec_point: str, label_scalar: str, label_result: str) -> None: + """ + Compute the multiplication of a scalar value with an elliptic point curve + and store the result. + + Parameters: + label_ec_point (str): The label for the elliptic curve point. + label_scalar (str): The label for the scalar. + label_result (str): The label for the element where the result is stored. + + Returns: + None + """ + + for node in self.nodes: + # DB management + ec_point = node.get_open(label_ec_point) + scalar = node.get_open(label_scalar) + # Local operation: mult + result = scalar * ec_point + # DB management + node.set_open(result, label_result) + + def encrypt_and_delete_exp_sh_local(self, label: str, client_id: int) -> None: + """ + Encrypt the share of the exponent element of the LAMBDA pair and delete the original + LAMBDA pair. + + Parameters: + label (str): The label for LAMBDA pair. + client_id (int): Client id. 
+ + Returns: + None + """ + + for node in self.nodes: + # DB management + clear_share = node.get_share(label+"_lambda_sh_exp") + # Local operation: + ## Encrypt share + enc_sh_val = node.he_public_keys[client_id - 1].encrypt(clear_share) + ## Delete lambda pair + node.delete_share(label+"_lambda_sh_exp") + node.delete_share(label+"_lambda_sh_base") + # DB management + sh_label = label+"_enc_sh_exp" + node.set_share(enc_sh_val, sh_label) + + def send_public_key_to_client(self, label: str, client: Client) -> None: + """ + Nodes send public key to client. + + Parameters: + label (str): The label for LAMBDA pair. + client_id (int): Client id. + + Returns: + None + """ + + all_y = [node.get_open(label+"_pk") for node in self.nodes] + # Check if all elements in the list are equal + are_all_equal = all(y == all_y[0] for y in all_y) + if are_all_equal: + client.set_open(all_y[0], label+"_pk") + else: + raise PublicKeyDisagreement("Abort.") + + def distributed_key_generation_protocol(self, client_id: int, label=None) -> None: + """ + Execute a distributed key generation protocol for a specific client. + + Parameters: + client_id (int): The unique identifier for the client. + label (str, optional): A custom label associated with the client. Defaults to None. + + Returns: + None + """ + + # Check there exist a client + client = next((client for client in self.clients if client.id == client_id), None) + if client == None: + raise TypeError(f"Client with id {client_id} is not part of the network.") + label = str(client_id)+"th_client_"+str(label) if label else str(client_id)+"th_client_"+"x" + delete = not self.debug + # Step 1 + self.get_lambda([label]) + + # Step 2 + self.key_agreement_protocol(label, delete=delete) + + # Step 3 + self.send_public_key_to_client(label, client) + + # Step 4 + self.encrypt_and_delete_exp_sh_local(label, client_id) + + + def compute_r_local(self, label: str, client: Client, delete=True) -> None: + """ + Compute r. + + Parameters: + label (str): The label of the r element. + client (Client): A client. + + Returns: + None + """ + + for node in self.nodes: + # DB management + R = node.get_open(label + "_pk") + # Local operation + r = R % self.q if self.setup == DSASetup else int(R.x) + # DB management + node.set_open(r, label + "_r") + node.delete_open(label + "_pk") + client.set_open(r, label + "_r") + + def invert_masked_factor_local(self, label) -> None: + """ + Invert a masked factor. + + Parameters: + label (str): The label of the masked factor to be inverted. + + Returns: + None + """ + + for node in self.nodes: + # DB management + masked_factor = node.get_open(label+"_sk") + share = node.get_share(label+"_lambda_sh_exp") + # Local operation + ## Invert masked factor + inv_masked_factor = egcd(masked_factor, self.q) + ## Invert share + inv_share = -share % (self.q - 1) + # DB management + node.set_open(inv_masked_factor, label+"_inv_sk") + sh_inv_label = label+"_inv_lambda_sh_exp" + node.set_share(inv_share, sh_inv_label) + + def encrypt_and_add_to_sk_local( + self, + label: str, + save_label: str, + client_id: int, + delete=True + ) -> None: + """ + Encrypt share and add ecrypted value to the encrypted share secret key blinding exponent. + + Parameters: + label (str): The label of the share to be encrypted. + save_label (str): The label used to save the result of the encrypted addition. + client_id (int): The unique identifier for the client. 
+ + Returns: + None + """ + + for node in self.nodes: + # DB management + clear_share = node.get_share(label+"_sh_exp") + enc_lambda_sk = node.get_share(str(client_id)+"th_client_x_enc_sh_exp") + # Local operation + ## Encrypt value from label + encrypted_share_value = node.he_public_keys[client_id - 1].encrypt(clear_share) + ## Add encrypted values + enc_gap_value = encrypted_share_value + enc_lambda_sk + # DB management + enc_gap_label = str(client_id)+"th_client_"+save_label+"_enc_sh_exp" + node.set_share(enc_gap_value, enc_gap_label) + node.delete_share(label+"_sh_exp") if delete else None + + def decrypt_and_reconstruct_local( + self, + get_label: str, + save_label: str, + client: Client + ) -> None: + """ + Decryption and reconstruction executed by the client. + + Parameters: + get_label (str): The label of the shares to be dencrypted and reconstructed. + save_label (str): The label used to save the result. + client_id (int): The unique identifier for the client. + + Returns: + None + """ + + + # DB management + enc_sh_per_node = [client.get_share(get_label+"_sh_exp_node_"+str(node.id)) for node in self.nodes] + # Local operation + ## Decrypt + dec_sh_per_node = [client.he_private_key.decrypt(enc_sh) for enc_sh in enc_sh_per_node] + q_minus_one = self.q - 1 + ## Reconstruct and take the symmetric value + dec_val = -add(dec_sh_per_node, q_minus_one) % q_minus_one + # DB management + dec_label = save_label + "_exp" + client.set_share(dec_val, dec_label) + [client.delete_share(get_label+"_sh_exp_node_"+str(node.id)) for node in self.nodes] if not self.debug else None + + + def ts_prep_protocol(self, client_id): + """ + Execute the preprocessing phase of the threshold signature protocol for a specific client. + + Parameters: + client_id (int): The unique identifier for the client. + + Returns: + None + + Raises: + TypeError: If the client with the provided 'client_id' is not part of the network. + KeyError: If the public key is not complete for the specified client. + """ + + # Check there exist a client + client = next((client for client in self.clients if client.id == client_id), None) + if client == None: + raise TypeError(f"Client with id {client_id} is not part of the network.") + # Check there exist client public key triple (, y, Enc([\lambda_x])) + try: + for node in self.nodes: + node.get_open(str(client_id)+"th_client_x_sk") + node.get_open(str(client_id)+"th_client_x_pk") + node.get_share(str(client_id)+"th_client_x_enc_sh_exp") + except KeyError: + print(f"Public key triple (, y, Enc([\lambda_x])) from DKG is not complete for client {client_id}. 
Generate it first using 'distributed_key_generation_protocol({client_id})'") + + # Client independent preprocessing + # Step 1 + label_k = str(client_id)+"th_client_k" + label_lambda_1 = str(client_id)+"th_client_lambda_1" + label_lambda_2 = str(client_id)+"th_client_lambda_2" + self.get_lambda([label_k, label_lambda_1, label_lambda_2]) + # Step 2 + self.key_agreement_protocol(label_k) + # Step 3 + self.compute_r_local(label_k, client) + # Step 4: invert k + self.invert_masked_factor_local(label_k) + # Step 5: compute m share + self.subtract_exp_shares_local(label_lambda_1 + "_lambda", label_k + "_inv_lambda", str(client_id)+"th_client_m_lambda_exp") + + # Client dependent preprocessing + # Step 6: reveal to client + get_label = str(client_id)+"th_client_m_lambda_exp" + save_label_m = "m_lambda_exp" + type_share = "exp" + self.reveal(type_share, get_label, save_label_m, client) + # Step 7: encrypt and share to client + ## Compute difference lambda_inv_k - lambda_2 + self.subtract_exp_shares_local(label_k + "_inv_lambda", label_lambda_2 + "_lambda", str(client_id)+"th_client_k_inv_minus_lambda_2") + ## Encrypt the difference and add to the encrypted secret key lambda + label_gap = "gap_lambda" + delete = not self.debug + self.encrypt_and_add_to_sk_local(str(client_id)+"th_client_k_inv_minus_lambda_2", label_gap, client_id, delete=delete) + ## Send it to the client + label_send_gap = str(client_id)+"th_client_"+label_gap+"_enc" + type_share = "exp" + self.send(type_share, label_send_gap, client, delete=True) + # Step 8: client decrypts and reconstructs + self.decrypt_and_reconstruct_local(label_send_gap, label_gap, client) + + def broadcast_masked_message_digest(self, message: str, client: Client) -> None: + """ + Broadcasts a masked message digest to the client. + + Parameters: + message (str): The input message to be hashed and masked. + client (Client): An instance of the client participating in the protocol. + + Returns: + None + """ + + # DB management + m_lambda_exp = client.get_share("m_lambda_exp") + gap_lambda_exp = client.get_share("gap_lambda_exp") + # Local operation + ## Compute message + message_digest = SHA256.new(data=message.encode("utf-8")) + m = int(message_digest.hexdigest(), 16) % self.q + ## Compute gap particle + particle = m * pow(self.h, -m_lambda_exp, self.q) % self.q + gap_particle = particle * pow(self.h, gap_lambda_exp, self.q) % self.q + # Broadcast + self.broadcast(gap_particle, str(client.id)+"th_client_gap_particle_m") + + def sign_local(self, client_id: int, delete=True): + """ + Sign a message locally and optionally delete intermediate shares. + + Parameters: + client_id (int): The unique identifier of the client. + delete (bool, optional): A flag indicating whether to delete intermediate shares after signing (default is True). 
+ + Returns: + None + """ + q = self.q + + for node in self.nodes: + # DB management + sh_lambda_1 = node.get_share(str(client_id)+"th_client_lambda_1_lambda_sh_base") + sh_lambda_2 = node.get_share(str(client_id)+"th_client_lambda_2_lambda_sh_base") + p_k_inv = node.get_open(str(client_id)+"th_client_k_inv_sk") + p_x = node.get_open(str(client_id)+"th_client_x_sk") + p_r = node.get_open(str(client_id)+"th_client_k_r") + p_gap_m = node.get_open(str(client_id)+"th_client_gap_particle_m") + # Local operation + s_h_gap_left = (sh_lambda_1 * p_k_inv) % q + s_h_gap_left = (s_h_gap_left * p_gap_m) % q + s_h_gap_right = (sh_lambda_2 * p_k_inv) % q + s_h_gap_right = (s_h_gap_right * p_r) % q + s_h_gap_right = (s_h_gap_right * p_x) % q + s_h_gap = (s_h_gap_left + s_h_gap_right) % q + # DB management + node.set_share(s_h_gap, str(client_id)+"th_client_signature_sh_base") + if delete: + node.delete_share(str(client_id)+"th_client_lambda_1_lambda_sh_base") + node.delete_share(str(client_id)+"th_client_lambda_1_lambda_sh_exp") + node.delete_share(str(client_id)+"th_client_lambda_2_lambda_sh_base") + node.delete_share(str(client_id)+"th_client_lambda_2_lambda_sh_exp") + node.delete_share(str(client_id)+"th_client_k_lambda_sh_exp") + node.delete_share(str(client_id)+"th_client_k_lambda_sh_base") + node.delete_share(str(client_id)+"th_client_k_inv_lambda_sh_exp") + node.delete_open(str(client_id)+"th_client_k_sk") + + def reconstruct_and_verify_sig(self, message: str, get_label: str, client: Client, delete=True): + """ + Reconstructs and verifies a client's digital signature for a given message. + + Parameters: + message (str): The input message for which the signature is to be reconstructed and verified. + get_label (str): The label used to retrieve the client's signature share from the database. + client (Client): An instance of the client for which the signature is reconstructed and verified. + delete (bool, optional): A flag indicating whether to delete intermediate shares after verification (default is True). + + Returns: + None: This function doesn't return a value; it verifies the signature and potentially deletes intermediate shares. + """ + q = self.q + if self.setup == DSASetup: + p = self.dsa.p + g = self.dsa.g + else: + G = self.ecdsa.G + + # DB management + gap_lambda_exp = client.get_share("gap_lambda_exp") + y = client.get_open(str(client.id)+"th_client_x_pk") + r = client.get_open(str(client.id)+"th_client_k_r") + s_h_gap = client.get_share(get_label) + # Compute signature + s = (s_h_gap * pow(self.h, -gap_lambda_exp, self.q)) % self.q + # Verify signature + verify_dsa_signature(message, r, s, y, p, q, g) if self.setup == DSASetup else verify_ecdsa_signature(message, r, s, y, q, G) + # DB management + signature_label = str(client.id)+"th_client_s" + client.set_open(s, signature_label) + message_label = str(client.id)+"th_client_message" + client.set_open(message, message_label) + + + + def ts_online_protocol(self, message: str, client_id: int) -> None: + """ + Executes the online phase of the threshold signature protocol for a specific client. + + Parameters: + message (str): The message to be signed by the client. + client_id (int): The unique identifier of the client participating in the protocol. 
+ + Returns: + None + """ + + # Check there exist a client + client = next((client for client in self.clients if client.id == client_id), None) + if client == None: + raise TypeError(f"Client with id {client_id} is not part of the network.") + # Check there 'ts_prep_protocol' was run + try: + for node in self.nodes: + node.get_open(str(client_id)+"th_client_k_inv_sk") + node.get_open(str(client_id)+"th_client_k_r") + client.get_share("gap_lambda_exp") + client.get_share("m_lambda_exp") + except KeyError: + print(f"The preprocessing phase was not run for client {client_id}.") + + + # Step 11: compute digest, mask it, include gap and broadcast the result to all nodes + self.broadcast_masked_message_digest(message, client) + + # Step 12a: all nodes compute locally the shares corresponding to clients + delete = not self.debug + self.sign_local(client_id, delete=delete) + + # Step 12b: reveal to client + get_label = str(client_id)+"th_client_signature" + save_label_m = "sig_gap" + type_share = "base" + self.reveal(type_share, get_label, save_label_m, client) + + # Step 13 verify + get_label = "sig_gap" + self.reconstruct_and_verify_sig(message, get_label, client) + + def print_signature(self, client_id: int) -> None: + + # Check there exist a client + client = next((client for client in self.clients if client.id == client_id), None) + if client == None: + raise TypeError(f"Client with id {client_id} is not part of the network.") + # Check there exist client public key triple (, y, Enc([\lambda_x])) + try: + r = client.get_open(str(client.id)+"th_client_k_r") + s = client.get_open(str(client.id)+"th_client_s") + m = client.get_open(str(client.id)+"th_client_message") + except KeyError: + print(f"Signature not generated for client {client_id}.'") + + print(f" Client(id={client_id},") + print(f" r={r},") + print(f" s={s},") + print(f" m={m},\n )") + + + def retrieve_signature(self, client_id: int) -> (int, int, str): + + # Check there exist a client + client = next((client for client in self.clients if client.id == client_id), None) + if client == None: + raise TypeError(f"Client with id {client_id} is not part of the network.") + # Check there exist client public key triple (, y, Enc([\lambda_x])) + try: + r = client.get_open(str(client.id)+"th_client_k_r") + s = client.get_open(str(client.id)+"th_client_s") + m = client.get_open(str(client.id)+"th_client_message") + except KeyError: + print(f"Signature not generated for client {client_id}.'") + + return r, s, m + + +class PublicKeyDisagreement(Exception): + def __init__(self, message): + self.message = f"Public keys are not consistent. {message}" + super().__init__(self.message) + diff --git a/src/tecdsa/utils.py b/src/tecdsa/utils.py new file mode 100644 index 0000000..e3f495f --- /dev/null +++ b/src/tecdsa/utils.py @@ -0,0 +1,227 @@ +import unittest +import random +from Crypto.PublicKey.ECC import EccPoint +from Crypto.Hash import SHA256 + +rand = random.randrange + +def add(values: list[int], size: int) -> int: + """ + Calculate the sum of a list of integers modulo 'size'. + + Args: + values (list[int]): A list of integers to be summed. + size (int): The modulo value. + + Returns: + int: The sum of the integers in 'values' modulo 'size'. + + Examples: + >>> add([2, 4, 6], 5) + 2 + >>> add([3, 7, 10], 4) + 0 + """ + + result = 0 + for v in values: + result = (result + v) % size + return result + +def add_ec(points: list[EccPoint]) -> int: + """ + Calculate the sum of a list of elliptic curve points. 
+ + Args: + points (list[EccPoint]): A list of elliptic curve points to be summed. + + Returns: + EccPoint: The sum of the points. + """ + + result = points[0] + for v in points[1:]: + result = (result + v) + return result + + +def generate_additive_shares(secret: int, n: int, size: int) -> list[int]: + """ + Generates additive secret shares for a given secret value, using modular arithmetic. + + Args: + secret (int): The secret value to be shared. + n (int): The number of shares to generate. + size (int): The modulus value for modular arithmetic. + + Returns: + List[int]: A list of additive secret shares. + + Example: + >>> random.seed(0) + >>> generate_additive_shares(26, 3, 2**5) + [8, 24, 26] + """ + shares = [rand(size) for _ in range(n-1)] + last_sh = (secret - add(shares, size)) % size + shares = [last_sh] + shares + + return shares + +def multiply(values: list[int], size: int) -> int: + """ + Calculate the product of a list of values, taking the modulus 'size' at each step of multiplication. + + Args: + values (list[int]): List of integers to be multiplied. + size (int): Modulus value to prevent the result from growing too large. + + Returns: + int: The product of the values, computed modulo 'size'. + + Example: + >>> multiply([2, 4, 6], 5) + 3 + """ + + result = 1 + for v in values: + result = (result * v) % size + return result + +def egcd(a: int, p: int) -> int: + """ + Calculate the modular multiplicative inverse of 'a' modulo 'p' using the extended Euclidean algorithm. + + Args: + a (int): Integer for which the modular inverse is calculated. + p (int): Modulus value for the modulo operation. + + Returns: + int: Modular multiplicative inverse of 'a' modulo 'p'. + """ + q = p + x, last_x = 0, 1 + y, last_y = 1, 0 + while q != 0: + quot = a // q + a, q = q, a % q + x, last_x = last_x - quot * x, x + y, last_y = last_y - quot * y, y + return last_x % p + +def hash(message: str, q: int): + """ + Computes the hash of the mesage using SHA256. + + Parameters: + message (str): The message to verify the signature for. + q (int): Field size to embbed digest. + + Returns: + m (int): hash of message. + """ + + message_digest = SHA256.new(data=message.encode("utf-8")) + m = int(message_digest.hexdigest(), 16) % q + + return m + +def verify_dsa_signature(message: int, r: int, s: int, y: int, p: int, q: int, g: int) -> None: + """ + Verify a Digital Signature Algorithm (DSA) signature. + + Parameters: + message (str): The message to verify the signature for. + r (int): The 'r' component of the DSA signature. + s (int): The 's' component of the DSA signature. + y (int): The public key 'y' associated with the signer. + p (int): The prime modulus used in DSA. + q (int): A prime divisor of 'p'. + g (int): The generator of the subgroup used in DSA. + + Returns: + None: If the signature is valid, no exceptions are raised. + + Raises: + VerifySignatureError: If the signature verification fails due to one of the following reasons: + 'r' or 's' is greater than or equal to 'q'; the calculated 'v' does not match 'r'. + """ + + if r >= q or s >= q: + raise VerifySignatureError("Signature out of bound q. Abort.") + m = hash(message, q) + w = egcd(s, q) + u1 = (m * w) % q + u2 = (r * w) % q + v = (pow(g, u1, p) * pow(y, u2, p) % p) % q + if v != r: + raise VerifySignatureError("Signature mismatch. Abort.") + +def verify_ecdsa_signature(message: int, r: int, s: int, Y: EccPoint, q: int, G: EccPoint) -> None: + """ + Verify an Elliptic Curve Digital Signature Algorithm (ECDSA) signature. 
+ + Parameters: + message (str): The message to verify the signature for. + r (int): The 'r' component of the DSA signature. + s (int): The 's' component of the DSA signature. + Y (EccPoint): The public key 'y' associated with the signer. + q (int): Order of the Elliptic Curve group. + G (EccPoint): The generator of the Elliptic Curve group. + + Returns: + None: If the signature is valid, no exceptions are raised. + + Raises: + VerifySignatureError: If the signature verification fails due to one of the following reasons: + 'r' or 's' is greater than or equal to 'q'; the calculated 'v' does not match 'r'. + """ + + if r >= q or s >= q: + raise VerifySignatureError("Signature out of bound q. Abort.") + m = hash(message, q) + w = egcd(s, q) + u1 = (m * w) % q + u2 = (r * w) % q + V = u1 * G + u2 * Y + v = int(V.x) + if v != r: + raise VerifySignatureError("Signature mismatch. Abort.") + + +class VerifySignatureError(Exception): + def __init__(self, message): + self.message = f"Signature verification failed. {message}" + super().__init__(self.message) + + +class TestUtils(unittest.TestCase): + + def setUp(self): + random.seed(0) + + def test_add(self): + + result = add([2,4,6], 5) + self.assertEqual(result, 2) + + def test_generate_additive_shares(self): + + secret = 29 + nr_shares = 3 + size = 2**5 + shares = generate_additive_shares(secret, nr_shares, size) + computed_secret = add(shares, size) + self.assertEqual(secret, computed_secret) + + def test_multiply(self): + + result = multiply([2,4,6], 5) + self.assertEqual(result, 3) + + + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_tbecdsa.py b/tests/test_tbecdsa.py new file mode 100644 index 0000000..d3536af --- /dev/null +++ b/tests/test_tbecdsa.py @@ -0,0 +1,302 @@ +from phe import paillier +from random import seed +import time + +from tecdsa.utils import add, verify_ecdsa_signature +from tecdsa.setup import DSASetup, ECDSASetup +from tecdsa.tecdsa import ThresholdSignature + + +import unittest + + +class TestSig(unittest.TestCase): + + + def setUp(self): + seed(0) + N = 10; C = 1 + # DSA setup + manual_dsa_setup = True + if manual_dsa_setup: + p = 16987220163402883416449356930313946536948708368250187300904484990592060034399925373558684845589122357155245527725130833676269318205326149268410610561367974110319706088695097181729621805806503895242356834473026604015120592348890617701675387428807728090415853765634415325555621648235338466349957683063948139664640253794461972428009207212678775162641560258829400418398089166048123240989061901894801714545511227607162820358567023001939860545346903340718565981921863209491363679301897076211852920953764568258713784702534591771430944194446014721504677270780731006777716425745362753111142293413901721847152726957943402872047 + q = 18615201011907374064080708325380633467600489307695820739772219003499; + g = 1440750739647392583923353319762863205412412735463771135600354281498545556560554285032144083567469348458038821471561505478727536048946568600306333026876282227253717145726280535747755789389298351217147981114134445522434273687560094566805116079958307881112688486903459951003823567315217837479260063662350297462681218852749673559083125704211468000331391500202590446254295826681166987302499736009857926325072657165790463352645906484288271010829609728496608136458748019477999277575560554800692468233144862681997749241093911491601564874805253852956797072221469937174346581408575685518457073416604892562933677029344283366064 + h = 2 + dsa_setup = DSASetup(p, q, g, h) + self.fnil = 
ThresholdSignature(N, C, setup=dsa_setup) + self.fnil_debug = ThresholdSignature(N, C, setup=dsa_setup, debug=True) + else: + self.fnil = ThresholdSignature(N, C) + self.fnil_debug = ThresholdSignature(N, C, debug=True) + # ECDSA setup + ecdsa_setup = ECDSASetup(curve="P-256") + self.ecnil = ThresholdSignature(N, C, setup=ecdsa_setup) + self.ecnil_debug = ThresholdSignature(N, C, setup=ecdsa_setup, debug=True) + + def test_generate_dsa_setup(self): + if False: + dsa_setup, h = DSASetup.generate_dsa_setup() + + def test_rss(self): + size = self.fnil.q - 1 + self.fnil.rss_protocol(size, "ka_share") + + def test_pow_share_protocol(self): + # setup + vals = [23, 4839, 12341235234] + for val in vals: + q = self.fnil.q + h_val = pow(self.fnil.h, val, q) + # Share + label = "ka_x" + self.fnil.share(val, q - 1, label + "_sh_exp") + + # compute expint + base_type = "exp" + self.fnil.pow_share_protocol(base_type, label, label) + + # test + ## check all nodes have the same expint value. + results_gr = [] + for node in self.fnil.nodes: + results_gr.append(node.open_db[label]) + first_result_gr = results_gr[0] + self.assertTrue(all(element == first_result_gr for element in results_gr)) + ## check they are correct + self.assertEqual(h_val, first_result_gr) + + def test_ec_pow_share_protocol(self): + # setup + vals = [23, 4839, 12341235234] + for val in vals: + q = self.ecnil.q + G = self.ecnil.ecdsa.G + val_G = val * G + # Share + label = "ka_x" + self.ecnil.share(val, q, label + "_sh_base") + + self.ecnil.ec_pow_share_protocol(label, label) + + # test + ## check all nodes have the same value. + results_gr = [] + for node in self.ecnil.nodes: + results_gr.append(node.open_db[label]) + first_result_gr = results_gr[0] + self.assertTrue(all(element == first_result_gr for element in results_gr)) + ## check they are correct + self.assertEqual(val_G, first_result_gr) + + def test_key_agreement(self): + label = "x" + self.fnil.get_lambda([label]) + self.fnil.key_agreement_protocol(label, delete=False) + + # Check that y == g^x + ## Reconstruct r + shares = [node.get_share("random"+"_sh_exp") for node in self.fnil.nodes] + r = add(shares, self.fnil.q - 1) + ## Compute x: x = h^r + x = pow(self.fnil.h, r, self.fnil.q) + # check y == g^x + y = self.fnil.nodes[0].open_db[label + "_pk"] + g_x = pow(self.fnil.dsa.g, x, self.fnil.dsa.p) + self.assertEqual(y, g_x) + + def test_ec_key_agreement(self): + label = "x" + self.ecnil.get_lambda([label]) + self.ecnil.key_agreement_protocol(label, delete=False) + + # Check that y == x*G + ## Reconstruct r + shares = [node.get_share("random"+"_sh_exp") for node in self.ecnil.nodes] + r = add(shares, self.ecnil.q - 1) + ## Compute x: x = h^r + x = pow(self.ecnil.h, r, self.ecnil.q) + # check y == x*G + y = self.ecnil.nodes[0].open_db[label + "_pk"] + x_G = x * self.ecnil.ecdsa.G + self.assertEqual(y, x_G) + + def test_paillier_encryption_gf(self): + vals = [12341247] + for val in vals: + q_minus_one = self.fnil.q - 1 + # Share + label = "test_sh_exp" + self.fnil.share(val, q_minus_one,label) + # Collect shares + shares = [node.get_share(label) for node in self.fnil.nodes] + + # Paillier + pub_key, priv_key = paillier.generate_paillier_keypair() + ## Testing sum of elements + start_all = time.time() + start_enc = time.time() + enc_shares = [pub_key.encrypt(val) for val in shares] + finish_enc = time.time() + enc_sum = enc_shares[0] + start_add = time.time() + for enc_element in enc_shares[1:]: + enc_sum = enc_sum + enc_element + finish_add = time.time() + start_dec = time.time() 
+ decrypted_final = priv_key.decrypt(enc_sum) % q_minus_one + finish_dec = time.time() + finish_all = time.time() + enc_time = (finish_enc - start_enc) + add_time = (finish_add - start_add) + dec_time = (finish_dec - start_dec) + all_time = (finish_all - start_all) + n = len(self.fnil.nodes) + + print(f"Time taken to encrypt {n} shares:\n{enc_time} seconds") + print(f"Time taken to add {n} encrypted shares:\n{add_time} seconds") + print(f"Time taken for one decryption:\n{dec_time} seconds") + print(f"Time evaluation for encrypted addition of {n} shares and one decryption (overall time):\n{all_time} seconds") + + final = add(shares, q_minus_one) + self.assertEqual(final, decrypted_final) + + ## Testing scalar multiplication + enc_3 = pub_key.encrypt(3) + enc_4 = pub_key.encrypt(4) + start_multiply = time.time() + enc_42 = enc_3*10 + enc_4*3 + finish_multiply = time.time() + scalar_multiplication_time = finish_multiply - start_multiply + print(f"Scalar multiplication: {scalar_multiplication_time} seconds" ) + + self.assertEqual(42, priv_key.decrypt(enc_42)) + + def test_distributed_key_generation(self): + start_time = time.time() + self.fnil.distributed_key_generation_protocol(1) + end_time = time.time() + n = len(self.fnil.nodes) + elapsed_time_per_party = (end_time - start_time)/n + print(f"Time evaluation for DSA-DKG protocol (computation):\n{elapsed_time_per_party} seconds") + + def test_ec_distributed_key_generation(self): + start_time = time.time() + self.ecnil.distributed_key_generation_protocol(1) + end_time = time.time() + n = len(self.ecnil.nodes) + elapsed_time_per_party = (end_time - start_time)/n + print(f"Time evaluation for ECDSA-DKG protocol (computation):\n{elapsed_time_per_party} seconds") + + def test_error_client_missing_ts_prep_protocol(self): + if False: + self.fnil.ts_prep_protocol(2) + + def test_error_secret_key_missing_ts_prep_protocol(self): + if False: + self.fnil.ts_prep_protocol(1) + + def test_ts_prep_protocol(self): + self.fnil.distributed_key_generation_protocol(1) + start_time = time.time() + self.fnil.ts_prep_protocol(1) + end_time = time.time() + n = len(self.fnil.nodes) + elapsed_time_per_party = (end_time - start_time)/n + print(f"Time evaluation for Prep DSA-Sign protocol (computation):\n{elapsed_time_per_party} seconds") + + def test_ec_ts_prep_protocol(self): + self.ecnil.distributed_key_generation_protocol(1) + start_time = time.time() + self.ecnil.ts_prep_protocol(1) + end_time = time.time() + n = len(self.ecnil.nodes) + elapsed_time_per_party = (end_time - start_time)/n + print(f"Time evaluation for Prep ECDSA-Sign protocol (computation):\n{elapsed_time_per_party} seconds") + + def test_ts_online_protocol(self): + self.fnil.distributed_key_generation_protocol(1) + self.fnil.ts_prep_protocol(1) + message = "Message to sign" + start_time = time.time() + self.fnil.ts_online_protocol(message, 1) + end_time = time.time() + n = len(self.fnil.nodes) + elapsed_time_per_party = (end_time - start_time)/n + print(f"Time evaluation for Online DSA-Sign protocol (computation):\n{elapsed_time_per_party} seconds") + self.fnil.print() + + def test_ec_bs_online_protocol(self): + self.ecnil.distributed_key_generation_protocol(1) + self.ecnil.ts_prep_protocol(1) + message = "Message to sign" + start_time = time.time() + self.ecnil.ts_online_protocol(message, 1) + end_time = time.time() + elapsed_time_per_party = (end_time - start_time) + print(f"Time evaluation for Online ECDSA-Sign protocol (computation):\n{elapsed_time_per_party} seconds") + + def 
test_print_signature(self): + client_id = 1 + self.fnil.distributed_key_generation_protocol(client_id) + self.fnil.ts_prep_protocol(client_id) + message = "Message to sign" + self.fnil.ts_online_protocol(message, client_id) + self.fnil.print_signature(client_id) + self.fnil.print() + + def test_ec_print_signature(self): + client_id = 1 + self.ecnil.distributed_key_generation_protocol(client_id) + self.ecnil.ts_prep_protocol(client_id) + message = "Message to sign" + self.ecnil.ts_online_protocol(message, client_id) + self.ecnil.print_signature(client_id) + + def test_debug(self): + client_id = 1 + self.fnil_debug.distributed_key_generation_protocol(client_id) + self.fnil_debug.ts_prep_protocol(client_id) + message = "Message to sign" + self.fnil_debug.ts_online_protocol(message, client_id) + self.fnil_debug.print() + + def test_ec_debug(self): + client_id = 1 + self.ecnil_debug.distributed_key_generation_protocol(client_id) + self.ecnil_debug.ts_prep_protocol(client_id) + message = "Message to sign" + self.ecnil_debug.ts_online_protocol(message, client_id) + + def test_no_debug(self): + client_id = 1 + self.fnil.distributed_key_generation_protocol(client_id) + self.fnil.ts_prep_protocol(client_id) + message = "Message to sign" + self.fnil.ts_online_protocol(message, client_id) + self.fnil.print_signature(1) + + def test_ec_no_debug(self): + client_id = 1 + self.ecnil.distributed_key_generation_protocol(client_id) + self.ecnil.ts_prep_protocol(client_id) + message = "Message to sign" + self.ecnil.ts_online_protocol(message, client_id) + self.ecnil.print_signature(1) + + def test_ec_verify(self): + client_id = 1 + self.ecnil.distributed_key_generation_protocol(client_id) + self.ecnil.ts_prep_protocol(client_id) + message = "Message to sign" + self.ecnil.ts_online_protocol(message, client_id) + r, s, m = self.ecnil.retrieve_signature(client_id) + Y = self.ecnil.clients[client_id - 1].open_db[str(client_id)+"th_client_x_pk"] + q = self.ecnil.q + G = self.ecnil.ecdsa.G + verify_ecdsa_signature(message, r, s, Y, q, G) + + + +if __name__ == "__main__": + unittest.main() \ No newline at end of file
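
For quick reference, below is a minimal end-to-end usage sketch distilled from the test suite introduced in this patch (it mirrors test_ec_verify in tests/test_tbecdsa.py). It assumes the package is installed as tecdsa; the parameters N=10 nodes, C=1 client, and the P-256 curve simply reproduce the test configuration and are not the only supported values.

# Minimal usage sketch (mirrors tests/test_tbecdsa.py::test_ec_verify).
from tecdsa import ThresholdSignature, ECDSASetup, verify_ecdsa_signature

# A network of N=10 nodes serving C=1 client over the P-256 curve.
ecdsa_setup = ECDSASetup(curve="P-256")
network = ThresholdSignature(10, 1, setup=ecdsa_setup)

client_id = 1
network.distributed_key_generation_protocol(client_id)    # one-time DKG per client
network.ts_prep_protocol(client_id)                        # message-independent preprocessing
network.ts_online_protocol("Message to sign", client_id)   # online signing phase

# Retrieve the signature and verify it against the client's public key
# stored under the "<id>th_client_x_pk" label in the client's open database.
r, s, message = network.retrieve_signature(client_id)
Y = network.clients[client_id - 1].open_db[str(client_id) + "th_client_x_pk"]
verify_ecdsa_signature(message, r, s, Y, network.q, network.ecdsa.G)  # raises VerifySignatureError on failure

The DSA variant follows the same flow, constructing the network with a DSASetup instance (or with setup=None to generate one) and verifying with verify_dsa_signature instead.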